Mustain committed
Commit d0f2100
1 Parent(s): 40b8f48

Upload tokenizer

Files changed (2)
  1. special_tokens_map.json  +7 -1
  2. tokenizer_config.json  +1 -9
special_tokens_map.json CHANGED
@@ -16,7 +16,13 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<|endoftext|>",
+  "pad_token": {
+    "content": "<|pad|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "unk_token": {
     "content": "<|unknown|>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -41,14 +41,6 @@
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "65535": {
-      "content": "[PAD]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
   "additional_special_tokens": [
@@ -59,7 +51,7 @@
   "eos_token": "<|endoftext|>",
   "legacy": true,
   "model_max_length": 8192,
-  "pad_token": "<|endoftext|>",
+  "pad_token": "<|pad|>",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",