models:
  - model: migtissera/Tess-2.0-Llama-3-8B
    # No parameters necessary for base model
  - model: gradientai/Llama-3-8B-Instruct-Gradient-4194k
    parameters:
      density: 0.5  # fraction of delta parameters kept; the rest are dropped and rescaled (DARE)
      weight: 0.5   # relative contribution of this model to the merge
merge_method: dare_linear  # DARE pruning with linear interpolation of the retained deltas
base_model: migtissera/Tess-2.0-Llama-3-8B
parameters:
  int8_mask: true  # compute masks in int8 to reduce memory usage
dtype: bfloat16    # dtype used for the merge and the output weights
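
# A minimal usage sketch, assuming mergekit is installed and this config is
# saved as config.yml (the output directory name below is arbitrary):
#   mergekit-yaml config.yml ./merged-model --cuda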