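# mergekit configuration: DARE-TIES merge of three Llama-3-based checkpoints.
# The per-slice density/weight values below look machine-generated (the
# evolve-merge-v2 paths suggest an evolutionary search such as mergekit-evolve),
# so they are kept verbatim rather than rounded.
#
# A typical invocation, assuming mergekit is installed (pip install mergekit)
# and this file is saved as merge_config.yaml (both names are illustrative,
# not part of this config):
#
#   mergekit-yaml merge_config.yaml ./merged-model --cuda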
base_model: /content/drive/MyDrive/evolve-merge-v2/input_models/llma3_manydata_our_data_rope_3108389863
dtype: bfloat16
merge_method: dare_ties
parameters:
  int8_mask: 1.0
  normalize: 1.0
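# int8_mask stores the sparsification mask in int8 to reduce memory use;
# normalize rescales the source weights within each tensor so they sum to 1.
# Both are standard dare_ties options in mergekit.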
slices:
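# Each sources block merges one 8-layer span; the five spans tile layers
# [0, 40) across the three models. Within a span, density is the fraction of
# delta parameters DARE retains (1.0 = keep everything) and weight is that
# model's coefficient in the weighted sum of the surviving deltas.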
- sources:
  - layer_range: [0, 8]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llma3_manydata_our_data_rope_3108389863
    parameters:
      density: 0.7653375506603464
      weight: 0.13767610478325062
  - layer_range: [0, 8]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llma3_manydata_not_our_data_rope_1173759365
    parameters:
      density: 0.7336602489449524
      weight: 0.3666639544856324
  - layer_range: [0, 8]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llama3_sft_many_chat_3038212730
    parameters:
      density: 1.0
      weight: 0.3030835610677404
- sources:
  - layer_range: [8, 16]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llma3_manydata_our_data_rope_3108389863
    parameters:
      density: 0.9861387586510485
      weight: 0.3948174181228292
  - layer_range: [8, 16]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llma3_manydata_not_our_data_rope_1173759365
    parameters:
      density: 0.8413699662162298
      weight: 0.45739982954282526
  - layer_range: [8, 16]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llama3_sft_many_chat_3038212730
    parameters:
      density: 1.0
      weight: 0.30274586211044396
- sources:
  - layer_range: [16, 24]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llma3_manydata_our_data_rope_3108389863
    parameters:
      density: 0.9503146891835705
      weight: 0.2849061463174477
  - layer_range: [16, 24]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llma3_manydata_not_our_data_rope_1173759365
    parameters:
      density: 0.832031377573231
      weight: 0.6047693096979141
  - layer_range: [16, 24]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llama3_sft_many_chat_3038212730
    parameters:
      density: 0.9442991059236329
      weight: 0.4002445342115458
- sources:
  - layer_range: [24, 32]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llma3_manydata_our_data_rope_3108389863
    parameters:
      density: 0.8517897851608993
      weight: 0.3362716927810899
  - layer_range: [24, 32]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llma3_manydata_not_our_data_rope_1173759365
    parameters:
      density: 1.0
      weight: 0.2909336827183003
  - layer_range: [24, 32]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llama3_sft_many_chat_3038212730
    parameters:
      density: 1.0
      weight: 0.3474712168573882
- sources:
  - layer_range: [32, 40]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llma3_manydata_our_data_rope_3108389863
    parameters:
      density: 1.0
      weight: 0.27727322046805786
  - layer_range: [32, 40]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llma3_manydata_not_our_data_rope_1173759365
    parameters:
      density: 0.8394275769864135
      weight: 0.4724670213437233
  - layer_range: [32, 40]
    model: /content/drive/MyDrive/evolve-merge-v2/input_models/llama3_sft_many_chat_3038212730
    parameters:
      density: 1.0
      weight: 0.31333702280148296
tokenizer_source: base
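# The tokenizer is copied from the base model rather than rebuilt from the
# merged sources.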