ewof committed
Commit 995d663
1 parent: ce80ffe

updated to mistral 7b base 0.2 from 0.1

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/home/models/mistralai_Mistral-7B-v0.1",
+  "_name_or_path": "alpindale/Mistral-7B-v0.2-hf",
   "architectures": [
     "MistralForCausalLM"
   ],
@@ -16,11 +16,11 @@
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
   "rms_norm_eps": 1e-05,
-  "rope_theta": 10000.0,
-  "sliding_window": 4096,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.36.0.dev0",
+  "transformers_version": "4.40.0.dev0",
   "use_cache": false,
   "vocab_size": 32003
-}
+}
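
A minimal sketch to sanity-check the v0.2 base settings introduced above (larger RoPE base frequency, no sliding-window attention), assuming a placeholder repo id in place of this repository's actual path:

# Placeholder repo id; substitute the actual model path before running.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("your-namespace/this-model")

assert config.rope_theta == 1000000.0  # was 10000.0 on the v0.1 base
assert config.sliding_window is None   # v0.2 drops sliding-window attention
print(config.vocab_size)               # 32003, unchanged by this commit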
generation_config.json CHANGED
@@ -1,6 +1,7 @@
 {
   "_from_model_config": true,
   "bos_token_id": 1,
+  "do_sample": true,
   "eos_token_id": 2,
-  "transformers_version": "4.36.0.dev0"
+  "transformers_version": "4.40.0.dev0"
 }
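
With "do_sample": true now stored in generation_config.json, plain generate() calls sample by default instead of decoding greedily. A minimal sketch, again assuming a placeholder repo id:

# Placeholder repo id; the repo's generation_config supplies do_sample.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "your-namespace/this-model"
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo)

inputs = tokenizer("Hello,", return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=32)  # samples without an explicit do_sample argument
print(tokenizer.decode(output[0], skip_special_tokens=True))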
pytorch_model-00001-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:57e3ff4e359c24b9ddb50188db67dc269461098059a321f5e6ada664d7b326ed
-size 4943209721
+oid sha256:e72d16b6c5238232f53e86b82e951291b80e920081b505dab5a6274ea81c157e
+size 4943210208
pytorch_model-00002-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1ba02a5b2913a0043cca86b1b7fbe35869ea6bf550c469a6df62c43fe1a39e89
-size 4999844257
+oid sha256:837e5f23ac81b7fd4c1521ecb9a159fbfb8759b46ab22d7220f6870946bc8333
+size 4999844744
pytorch_model-00003-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:52468fc1339ce12df44b39ee162a610d827daac9f360ab75199e752340260928
-size 4540561439
+oid sha256:6547b942cb325ab46442f272ba17ca78f10088c59c5484d3cda354bd897f086f
+size 4540561990
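
Each shard diff above only swaps a Git LFS pointer, so a local download can be verified by hashing it and comparing against the recorded oid. A minimal sketch using the new first shard's hash; the file path is a placeholder for wherever the shard was downloaded:

import hashlib

expected = "e72d16b6c5238232f53e86b82e951291b80e920081b505dab5a6274ea81c157e"  # new shard 1 oid

h = hashlib.sha256()
with open("pytorch_model-00001-of-00003.bin", "rb") as f:  # placeholder local path
    for chunk in iter(lambda: f.read(1 << 20), b""):       # hash in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == expected, "shard does not match its LFS pointer"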
tokenizer_config.json CHANGED
@@ -1,6 +1,7 @@
 {
   "add_bos_token": true,
   "add_eos_token": false,
+  "add_prefix_space": true,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -51,7 +52,6 @@
       "special": false
     }
   },
-  "additional_special_tokens": [],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
@@ -61,7 +61,6 @@
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
-  "trust_remote_code": true,
   "unk_token": "<unk>",
   "use_default_system_prompt": false,
   "use_fast": true