# mergekit configuration: TIES merge of two Qwen2.5-14B derivatives onto the Qwen/Qwen2.5-14B base
merge_method: ties
base_model: Qwen/Qwen2.5-14B
models:
  - model: chargoddard/qwamma-14b-merge-v1
    parameters:
      density: 1.0    # retain all of this model's task-vector parameters
      weight: 1.0
  - model: arcee-train/Qwen2.5-14B-Instruct_arcee-qwen2-14B-v0.2
    parameters:
      density: 0.66   # retain the largest-magnitude 66% of this model's task-vector parameters
      weight:
        # list-valued weights are interpolated as a gradient across layer depth
        - filter: mlp
          value: [0, 0.3, 0.6, 0.1]
        - filter: self_attn
          value: [0, 0, 0.2, 0.1]
        - value: 0.1  # default weight for all remaining tensors
parameters:
  normalize: false    # do not rescale weights to sum to 1
  int8_mask: true     # store intermediate masks in int8 to save memory
dtype: float32        # dtype used during the merge computation
out_dtype: bfloat16   # dtype of the saved merged weights
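
For reference, a config like this is normally run with mergekit's command-line entry point. The file name and output directory below are placeholders; only the config path and output path are required, and this is a minimal sketch assuming mergekit is installed (pip install mergekit):

    mergekit-yaml path/to/config.yaml ./merged-model

The merged model is written to the output directory in the out_dtype given above (bfloat16 here), ready to be loaded or uploaded like any other checkpoint.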