pkbiswas committed
Commit 41b3057
1 Parent(s): 5204d25

End of training

README.md CHANGED
@@ -1,10 +1,11 @@
 ---
-license: other
-base_model: microsoft/phi-1_5
+license: apache-2.0
+library_name: peft
 tags:
 - generated_from_trainer
 datasets:
 - scitldr
+base_model: mistralai/Mistral-7B-Instruct-v0.2
 model-index:
 - name: uplimit-project-3-phi-1.5
   results: []
@@ -15,9 +16,9 @@ should probably proofread and complete it, then remove this comment. -->
 
 # uplimit-project-3-phi-1.5
 
-This model is a fine-tuned version of [microsoft/phi-1_5](https://huggingface.co/microsoft/phi-1_5) on the scitldr dataset.
+This model is a fine-tuned version of [mistralai/Mistral-7B-Instruct-v0.2](https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2) on the scitldr dataset.
 It achieves the following results on the evaluation set:
-- Loss: 2.6196
+- Loss: 2.1059
 
 ## Model description
 
@@ -36,30 +37,35 @@ More information needed
 ### Training hyperparameters
 
 The following hyperparameters were used during training:
-- learning_rate: 0.001
+- learning_rate: 0.0002
 - train_batch_size: 1
 - eval_batch_size: 1
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
-- num_epochs: 2
+- lr_scheduler_warmup_steps: 2
+- num_epochs: 1
+- mixed_precision_training: Native AMP
 
 ### Training results
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:-----:|:----:|:---------------:|
-| 2.5348        | 0.25  | 500  | 2.6490          |
-| 2.5595        | 0.5   | 1000 | 2.6399          |
-| 2.5485        | 0.75  | 1500 | 2.6266          |
-| 2.4935        | 1.0   | 2000 | 2.6108          |
-| 2.2928        | 1.26  | 2500 | 2.6303          |
-| 2.3625        | 1.51  | 3000 | 2.6258          |
-| 2.3473        | 1.76  | 3500 | 2.6196          |
+| 2.0732        | 0.1   | 200  | 2.1863          |
+| 2.1324        | 0.2   | 400  | 2.1925          |
+| 2.103         | 0.3   | 600  | 2.1876          |
+| 2.0766        | 0.4   | 800  | 2.1737          |
+| 2.0825        | 0.5   | 1000 | 2.1555          |
+| 2.0731        | 0.6   | 1200 | 2.1465          |
+| 2.0819        | 0.7   | 1400 | 2.1355          |
+| 1.9802        | 0.8   | 1600 | 2.1223          |
+| 2.0466        | 0.9   | 1800 | 2.1059          |
 
 
 ### Framework versions
 
-- Transformers 4.35.0
-- Pytorch 2.1.0+cu118
-- Datasets 2.14.6
-- Tokenizers 0.14.1
+- PEFT 0.9.0
+- Transformers 4.38.2
+- Pytorch 2.2.1+cu121
+- Datasets 2.18.0
+- Tokenizers 0.15.2
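The updated README front matter declares `library_name: peft` and `base_model: mistralai/Mistral-7B-Instruct-v0.2`, so the repository ships a LoRA adapter rather than full model weights. A minimal sketch of loading it for inference follows; the adapter repo id `pkbiswas/uplimit-project-3-phi-1.5` is an assumption inferred from the commit author and model name, not stated in the diff.

```python
# Minimal sketch of loading this LoRA adapter for inference.
# Assumption: the adapter lives at "pkbiswas/uplimit-project-3-phi-1.5";
# adjust the repo id if it differs.
import torch
from peft import AutoPeftModelForCausalLM
from transformers import AutoTokenizer

adapter_id = "pkbiswas/uplimit-project-3-phi-1.5"  # assumed repo id

# AutoPeftModelForCausalLM reads adapter_config.json, fetches the base model
# (mistralai/Mistral-7B-Instruct-v0.2) and attaches the LoRA weights.
model = AutoPeftModelForCausalLM.from_pretrained(
    adapter_id, torch_dtype=torch.float16, device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained(adapter_id)

prompt = "Summarize: We propose a new method for abstractive TLDR generation."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```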
adapter_config.json CHANGED
@@ -1,22 +1,31 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "microsoft/phi-1_5",
+  "base_model_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "lora_alpha": 16,
+  "loftq_config": {},
+  "lora_alpha": 64,
   "lora_dropout": 0.05,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 8,
+  "r": 16,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "Wqkv"
+    "o_proj",
+    "k_proj",
+    "v_proj",
+    "gate_proj",
+    "q_proj"
   ],
-  "task_type": "CAUSAL_LM"
+  "task_type": "CAUSAL_LM",
+  "use_dora": false,
+  "use_rslora": false
 }
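The training script is not part of this commit, so the following is only a sketch of a `peft.LoraConfig` that reproduces the fields saved in the new adapter_config.json, not the author's code.

```python
# Sketch of a LoraConfig matching the updated adapter_config.json.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                    # "r": 16
    lora_alpha=64,           # "lora_alpha": 64
    lora_dropout=0.05,       # "lora_dropout": 0.05
    bias="none",             # "bias": "none"
    task_type="CAUSAL_LM",   # "task_type": "CAUSAL_LM"
    target_modules=[         # attention and gate projections of Mistral
        "q_proj", "k_proj", "v_proj", "o_proj", "gate_proj",
    ],
)
```

Compared with the previous phi-1_5 adapter (r=8, lora_alpha=16, only "Wqkv" targeted), this config doubles the rank and targets five projection matrices of a much larger base model, which is consistent with adapter_model.safetensors growing from about 6.3 MB to about 92 MB in the next diff.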
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:95e1ce17ed5191911c18f4b383a94def199df23dfd6761eeb06eb133ded06396
-size 6297216
+oid sha256:ea233419c405e0eb879f67a9a1f35d1d2121778b05b38fe553a276b81f6f6130
+size 92317600
runs/Mar18_17-44-42_0431c431ff0e/events.out.tfevents.1710783891.0431c431ff0e.1163.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7423bf282a1356c232342176ac3425532255244c219cff1d1fe7775c7d02c210
+size 5065
runs/Mar18_17-45-57_0431c431ff0e/events.out.tfevents.1710783964.0431c431ff0e.1163.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c0a3ef77a72fe35773e01c069518d3d3e48f508b41bc075288f2fd63df4def48
+size 16080
special_tokens_map.json CHANGED
@@ -1,6 +1,24 @@
 {
-  "bos_token": "<|endoftext|>",
-  "eos_token": "<|endoftext|>",
-  "pad_token": "<|endoftext|>",
-  "unk_token": "<|endoftext|>"
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "</s>",
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }
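The new special token map switches to Mistral's `<s>`/`</s>`/`<unk>` tokens and reuses the EOS token as padding. How the padding was configured is not shown in this commit; a sketch of the usual way to end up with `"pad_token": "</s>"` is:

```python
# Sketch of the padding setup implied by the new special_tokens_map.json:
# the Mistral tokenizer ships no pad token, so EOS ("</s>") is reused.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2")
tokenizer.pad_token = tokenizer.eos_token  # yields "pad_token": "</s>"
print(tokenizer.pad_token, tokenizer.eos_token)  # </s> </s>
```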
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+size 493443
tokenizer_config.json CHANGED
@@ -1,324 +1,43 @@
 {
-  "add_prefix_space": false,
+  "add_bos_token": true,
+  "add_eos_token": true,
   "added_tokens_decoder": {
-    "50256": {
-      "content": "<|endoftext|>",
+    "0": {
+      "content": "<unk>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    ["50257" through "50294": 38 added tokens for runs of spaces and tabs, each with "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false]
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
   },
-  "bos_token": "<|endoftext|>",
-  "clean_up_tokenization_spaces": true,
-  "eos_token": "<|endoftext|>",
-  "model_max_length": 2048,
-  "pad_token": "<|endoftext|>",
-  "tokenizer_class": "CodeGenTokenizer",
-  "unk_token": "<|endoftext|>"
+  "additional_special_tokens": [],
+  "bos_token": "<s>",
+  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": true,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "</s>",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
 }
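The new tokenizer config switches from CodeGenTokenizer to LlamaTokenizer, enables `add_bos_token`/`add_eos_token`, and ships Mistral's `[INST]` chat template. A short sketch of rendering a prompt with that template:

```python
# Sketch: the chat_template carried in the updated tokenizer_config.json is
# turned into a prompt string by apply_chat_template.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2")
messages = [{"role": "user", "content": "Summarize this abstract in one sentence."}]
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)  # "<s>[INST] Summarize this abstract in one sentence. [/INST]"
```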
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0732dae4e7d2cb622d638826941eb8367e7323fdc74a59f1d7aaf6f233695874
-size 4600
+oid sha256:c6646272d0b84574cfbf6b998c07ba21b002b33b24e459747db8682137eba7f8
+size 4920
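training_args.bin is an opaque pickle of the Trainer arguments; its exact contents are not visible in this diff. A sketch of `TrainingArguments` consistent with the hyperparameters listed in the updated README (the output directory name is an assumption):

```python
# Sketch of TrainingArguments matching the README's reported hyperparameters.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="uplimit-project-3-phi-1.5",  # assumed output directory name
    learning_rate=2e-4,                      # learning_rate: 0.0002
    per_device_train_batch_size=1,           # train_batch_size: 1
    per_device_eval_batch_size=1,            # eval_batch_size: 1
    seed=42,
    lr_scheduler_type="linear",
    warmup_steps=2,                          # lr_scheduler_warmup_steps: 2
    num_train_epochs=1,
    fp16=True,                               # mixed_precision_training: Native AMP
)
```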