Mr-Vicky-01 committed on
Commit b98dbbb
1 Parent(s): 945aa53

Delete tokenizer_config.json

Files changed (1)
  1. tokenizer_config.json +0 -56
tokenizer_config.json DELETED
@@ -1,56 +0,0 @@
-{
-  "add_bos_token": false,
-  "add_eos_token": true,
-  "add_prefix_space": false,
-  "added_tokens_decoder": {
-    "50256": {
-      "content": "<|endoftext|>",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "50257": {
-      "content": "[PAD]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "50258": {
-      "content": "<sos>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "50259": {
-      "content": "<eos>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    }
-  },
-  "additional_special_tokens": [
-    "<sos>",
-    "<eos>"
-  ],
-  "bos_token": "<|endoftext|>",
-  "clean_up_tokenization_spaces": true,
-  "eos_token": "<|endoftext|>",
-  "errors": "replace",
-  "max_length": 512,
-  "model_max_length": 512,
-  "pad_token": "<|endoftext|>",
-  "padding_side": "right",
-  "stride": 0,
-  "tokenizer_class": "GPT2Tokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
-  "unk_token": "<|endoftext|>"
-}