leonardlin committed
Commit 49bf4a5
Parent: 00ad931

fixed tokenizer - original Gamma tokenizer

Files changed (3):
  1. special_tokens_map.json +3 -28
  2. tokenizer.json +0 -0
  3. tokenizer_config.json +1 -10
special_tokens_map.json CHANGED
@@ -1,30 +1,5 @@
 {
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "unk_token": "<unk>"
 }
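
Note on the simplified map: transformers accepts special tokens either as plain strings or as full AddedToken objects, so the string form above simply leaves the lstrip/rstrip/normalized flags at the library's defaults, and the pad_token entry is gone entirely. A minimal sketch of checking the loaded result after this commit (the path "./" is a placeholder for a local checkout of this repo):

from transformers import AutoTokenizer

# Placeholder path: a local checkout of this repo at this commit.
tok = AutoTokenizer.from_pretrained("./")

# The string-form special_tokens_map.json still resolves to the same tokens.
print(tok.bos_token, tok.eos_token, tok.unk_token)  # <s> </s> <unk>
print(tok.pad_token)  # None: the pad_token entry was removed
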
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -23,14 +23,6 @@
     "rstrip": false,
     "single_word": false,
     "special": true
-    },
-    "120073": {
-      "content": "<|extra_idx|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": false
     }
   },
   "additional_special_tokens": [],
@@ -39,8 +31,7 @@
   "eos_token": "</s>",
   "legacy": true,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<unk>",
-  "padding_side": "right",
+  "pad_token": null,
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",