helloollel committed
Commit cb7301c
1 Parent(s): 0977b85

apply lmsys/vicuna-13b-delta-v1.1

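The v1.1 delta is applied by adding the published delta tensors to the original LLaMA-13B weights, parameter by parameter. Below is a minimal Python sketch of that step, assuming a local Hugging Face-format LLaMA-13B directory; the paths are placeholders, and the supported way to do this is FastChat's fastchat.model.apply_delta script.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the original LLaMA-13B weights and the published delta in fp16.
base = AutoModelForCausalLM.from_pretrained(
    "/path/to/llama-13b_hf", torch_dtype=torch.float16, low_cpu_mem_usage=True
)
target = AutoModelForCausalLM.from_pretrained(
    "lmsys/vicuna-13b-delta-v1.1", torch_dtype=torch.float16, low_cpu_mem_usage=True
)

# Vicuna weights = LLaMA weights + delta; the v1.1 delta has the same tensor
# shapes as the base model, so a plain elementwise add suffices.
base_state = base.state_dict()
for name, param in target.state_dict().items():
    param.data += base_state[name]

# Save the reconstructed model and tokenizer; the sharded .bin files and
# config changes in this commit are the result of such a save.
target.save_pretrained("/path/to/vicuna-13b-v1.1")
AutoTokenizer.from_pretrained(
    "lmsys/vicuna-13b-delta-v1.1", use_fast=False
).save_pretrained("/path/to/vicuna-13b-v1.1")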
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/content/drive/MyDrive/AI/fastchat/13B_hf",
+  "_name_or_path": "/content/drive/MyDrive/AI/llama/13B_hf",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -17,7 +17,7 @@
   "rms_norm_eps": 1e-06,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.28.0.dev0",
+  "transformers_version": "4.29.0.dev0",
   "use_cache": true,
-  "vocab_size": 32001
+  "vocab_size": 32000
 }
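The substantive changes here are the vocabulary size and the base-model path: vocab_size drops from 32001 back to 32000, matching the stock LLaMA vocabulary (the earlier v0 delta had added an extra pad token), and "_name_or_path" now records the plain LLaMA conversion directory rather than a FastChat working copy. A quick sanity check that config and tokenizer agree after the merge, assuming the repo files are in the current directory:

from transformers import AutoConfig, AutoTokenizer

cfg = AutoConfig.from_pretrained(".")
tok = AutoTokenizer.from_pretrained(".", use_fast=False)

print(cfg.vocab_size)  # 32000 with the v1.1 delta (32001 under the v0 delta)
print(len(tok))        # the tokenizer must not define more ids than the embedding has rows
assert len(tok) <= cfg.vocab_size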
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "pad_token_id": 0,
-  "transformers_version": "4.28.0.dev0"
+  "transformers_version": "4.29.0.dev0"
 }
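Only the transformers_version stamp changes here; the generation defaults themselves (BOS 1, EOS 2, PAD 0) are untouched. A small sketch to inspect them, again assuming the files are in the current directory:

from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(".")
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id, gen_cfg.pad_token_id)  # 1 2 0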
pytorch_model-00001-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0bac14d265d543d2ec08047c274bfdf5eda35140cf20d3c70dd31c02dbca62e6
-size 9948738670
+oid sha256:7e754ec47918eb6569468a1fbdc68ee376202eb4e34c97a05951d894e195d296
+size 9948728430
pytorch_model-00002-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:668c7ef449e4aafd76cb4f63fc3b0bcbfa165bb004352ede1b727eb0d59aa8ef
+oid sha256:eecea1120efcd762af48bf54d7d5ff9ef3128cc33f144533dfc5a926fb6c541c
 size 9904165024
pytorch_model-00003-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:53a19ed5c724554b5315902b26519708e166d57827d559fb1f1cec1c57db3da9
-size 6506673929
+oid sha256:bf1ed63a11c0d9176006fe49914eaa911f0e73c2aaf614c11f8534ec934d7a89
+size 6506663689
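Each *.bin entry above is a Git LFS pointer, not the weights themselves: the oid line is the SHA-256 of the real shard and the size line is its byte count, so a downloaded shard can be checked against its pointer. A minimal verification sketch, using the file name and expected digest of the first shard in this commit:

import hashlib

def sha256sum(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file so multi-GB shards don't need to fit in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

expected = "7e754ec47918eb6569468a1fbdc68ee376202eb4e34c97a05951d894e195d296"
assert sha256sum("pytorch_model-00001-of-00003.bin") == expected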
pytorch_model.bin.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 26031759360
+    "total_size": 26031738880
   },
   "weight_map": {
     "lm_head.weight": "pytorch_model-00003-of-00003.bin",
tokenizer_config.json CHANGED
@@ -21,7 +21,6 @@
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": null,
   "sp_model_kwargs": {},
-  "special_tokens_map_file": "/root/.cache/huggingface/hub/models--lmsys--vicuna-13b-delta-v0/snapshots/573f08f953463f85b9bdabad64165b2a9e1cd66e/special_tokens_map.json",
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": {
     "__type": "AddedToken",