Undi95 committed
Commit acfc2c6
1 Parent(s): 9f49b22

Update tokenizer_config.json

Files changed (1)
  1. tokenizer_config.json  +33 -5
tokenizer_config.json CHANGED
@@ -1,9 +1,11 @@
 {
+  "add_bos_token": true,
+  "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -11,7 +13,7 @@
     "1": {
       "content": "<s>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -23,19 +25,45 @@
       "rstrip": false,
       "single_word": false,
       "special": true
+    },
+    "32000": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32001": {
+      "content": "<|im_start|>",
+      "lstrip": true,
+      "normalized": false,
+      "rstrip": true,
+      "single_word": false,
+      "special": true
+    },
+    "32002": {
+      "content": "[PAD]",
+      "lstrip": true,
+      "normalized": false,
+      "rstrip": true,
+      "single_word": false,
+      "special": true
     }
   },
   "additional_special_tokens": [
     "<unk>",
     "<s>",
-    "</s>"
+    "</s>",
+    "<|im_end|>",
+    "<|im_start|>"
   ],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "</s>",
+  "eos_token": "<|im_end|>",
   "legacy": true,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": null,
+  "pad_token": "[PAD]",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",