Abhaykoul committed on
Commit 7d1f4ad
1 Parent(s): 4de9f0d

Update tokenization_HelpingAI_fast.py
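
Although the diff touches every line (most likely whitespace or line-ending churn), the substantive change is the two import lines: the relative imports `from ...tokenization_utils_fast` and `from ...utils` only resolve when the file sits inside the `transformers` package itself, while custom tokenizer code fetched from a Hub repo with `trust_remote_code=True` is loaded as a standalone module and therefore needs the absolute `transformers.` imports.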

Files changed (1)
  1. tokenization_HelpingAI_fast.py +140 -140
tokenization_HelpingAI_fast.py CHANGED
@@ -1,141 +1,141 @@
-# Made by KingNish
-
-import json
-from typing import List, Optional, Tuple
-
-from tokenizers import pre_tokenizers, processors
-
-from ...tokenization_utils_fast import PreTrainedTokenizerFast
-from ...utils import logging
-
-
-logger = logging.get_logger(__name__)
-
-VOCAB_FILES_NAMES = {"vocab_file": "vocab.json", "merges_file": "merges.txt", "tokenizer_file": "tokenizer.json"}
-
-
-class HelpingAITokenizerFast(PreTrainedTokenizerFast):
-    vocab_files_names = VOCAB_FILES_NAMES
-    model_input_names = ["input_ids", "attention_mask"]
-
-    def __init__(
-        self,
-        vocab_file=None,
-        merges_file=None,
-        tokenizer_file=None,
-        unk_token="<|endoftext|>",
-        bos_token="<|im_start|>",
-        eos_token="<|im_end|>",
-        pad_token="<|im_end|>",
-        add_bos_token=False,
-        add_eos_token=False,
-        add_prefix_space=False,
-        **kwargs,
-    ):
-        super().__init__(
-            vocab_file,
-            merges_file,
-            tokenizer_file=tokenizer_file,
-            unk_token=unk_token,
-            bos_token=bos_token,
-            eos_token=eos_token,
-            pad_token=pad_token,
-            add_bos_token=add_bos_token,
-            add_eos_token=add_eos_token,
-            add_prefix_space=add_prefix_space,
-            **kwargs,
-        )
-
-        self._add_bos_token = add_bos_token
-        self._add_eos_token = add_eos_token
-        self.update_post_processor()
-
-        pre_tok_state = json.loads(self.backend_tokenizer.pre_tokenizer.__getstate__())
-        if pre_tok_state.get("add_prefix_space", add_prefix_space) != add_prefix_space:
-            pre_tok_class = getattr(pre_tokenizers, pre_tok_state.pop("type"))
-            pre_tok_state["add_prefix_space"] = add_prefix_space
-            self.backend_tokenizer.pre_tokenizer = pre_tok_class(**pre_tok_state)
-
-        self.add_prefix_space = add_prefix_space
-
-    @property
-    def add_eos_token(self):
-        return self._add_eos_token
-
-    @property
-    def add_bos_token(self):
-        return self._add_bos_token
-
-    @add_eos_token.setter
-    def add_eos_token(self, value):
-        self._add_eos_token = value
-        self.update_post_processor()
-
-    @add_bos_token.setter
-    def add_bos_token(self, value):
-        self._add_bos_token = value
-        self.update_post_processor()
-
-    def update_post_processor(self):
-        bos = self.bos_token
-        bos_token_id = self.bos_token_id
-        if bos is None and self.add_bos_token:
-            raise ValueError("add_bos_token = True but bos_token = None")
-
-        eos = self.eos_token
-        eos_token_id = self.eos_token_id
-        if eos is None and self.add_eos_token:
-            raise ValueError("add_eos_token = True but eos_token = None")
-
-        single = f"{(bos+':0 ') if self.add_bos_token else ''}$A:0{(' '+eos+':0') if self.add_eos_token else ''}"
-        pair = f"{single}{(' '+bos+':1') if self.add_bos_token else ''} $B:1{(' '+eos+':1') if self.add_eos_token else ''}"
-
-        special_tokens = []
-        if self.add_bos_token:
-            special_tokens.append((bos, bos_token_id))
-        if self.add_eos_token:
-            special_tokens.append((eos, eos_token_id))
-        self._tokenizer.post_processor = processors.TemplateProcessing(
-            single=single, pair=pair, special_tokens=special_tokens
-        )
-
-    def get_special_tokens_mask(
-        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False
-    ) -> List[int]:
-        if already_has_special_tokens:
-            return super().get_special_tokens_mask(
-                token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True
-            )
-
-        bos_token_id = [1] if self.add_bos_token else []
-        eos_token_id = [1] if self.add_eos_token else []
-
-        if token_ids_1 is None:
-            return bos_token_id + ([0] * len(token_ids_0)) + eos_token_id
-        return (
-            bos_token_id
-            + ([0] * len(token_ids_0))
-            + eos_token_id
-            + bos_token_id
-            + ([0] * len(token_ids_1))
-            + eos_token_id
-        )
-
-    def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
-        bos_token_id = [self.bos_token_id] if self.add_bos_token else []
-        eos_token_id = [self.eos_token_id] if self.add_eos_token else []
-
-        output = bos_token_id + token_ids_0 + eos_token_id
-
-        if token_ids_1 is not None:
-            output = output + bos_token_id + token_ids_1 + eos_token_id
-
-        return output
-
-    def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]:
-        files = self._tokenizer.model.save(save_directory, name=filename_prefix)
-        return tuple(files)
-
-    @property
-    def default_chat_template(self):
+# Made by KingNish
+
+import json
+from typing import List, Optional, Tuple
+
+from tokenizers import pre_tokenizers, processors
+
+from transformers.tokenization_utils_fast import PreTrainedTokenizerFast
+from transformers.utils import logging
+
+
+logger = logging.get_logger(__name__)
+
+VOCAB_FILES_NAMES = {"vocab_file": "vocab.json", "merges_file": "merges.txt", "tokenizer_file": "tokenizer.json"}
+
+
+class HelpingAITokenizerFast(PreTrainedTokenizerFast):
+    vocab_files_names = VOCAB_FILES_NAMES
+    model_input_names = ["input_ids", "attention_mask"]
+
+    def __init__(
+        self,
+        vocab_file=None,
+        merges_file=None,
+        tokenizer_file=None,
+        unk_token="<|endoftext|>",
+        bos_token="<|im_start|>",
+        eos_token="<|im_end|>",
+        pad_token="<|im_end|>",
+        add_bos_token=False,
+        add_eos_token=False,
+        add_prefix_space=False,
+        **kwargs,
+    ):
+        super().__init__(
+            vocab_file,
+            merges_file,
+            tokenizer_file=tokenizer_file,
+            unk_token=unk_token,
+            bos_token=bos_token,
+            eos_token=eos_token,
+            pad_token=pad_token,
+            add_bos_token=add_bos_token,
+            add_eos_token=add_eos_token,
+            add_prefix_space=add_prefix_space,
+            **kwargs,
+        )
+
+        self._add_bos_token = add_bos_token
+        self._add_eos_token = add_eos_token
+        self.update_post_processor()
+
+        pre_tok_state = json.loads(self.backend_tokenizer.pre_tokenizer.__getstate__())
+        if pre_tok_state.get("add_prefix_space", add_prefix_space) != add_prefix_space:
+            pre_tok_class = getattr(pre_tokenizers, pre_tok_state.pop("type"))
+            pre_tok_state["add_prefix_space"] = add_prefix_space
+            self.backend_tokenizer.pre_tokenizer = pre_tok_class(**pre_tok_state)
+
+        self.add_prefix_space = add_prefix_space
+
+    @property
+    def add_eos_token(self):
+        return self._add_eos_token
+
+    @property
+    def add_bos_token(self):
+        return self._add_bos_token
+
+    @add_eos_token.setter
+    def add_eos_token(self, value):
+        self._add_eos_token = value
+        self.update_post_processor()
+
+    @add_bos_token.setter
+    def add_bos_token(self, value):
+        self._add_bos_token = value
+        self.update_post_processor()
+
+    def update_post_processor(self):
+        bos = self.bos_token
+        bos_token_id = self.bos_token_id
+        if bos is None and self.add_bos_token:
+            raise ValueError("add_bos_token = True but bos_token = None")
+
+        eos = self.eos_token
+        eos_token_id = self.eos_token_id
+        if eos is None and self.add_eos_token:
+            raise ValueError("add_eos_token = True but eos_token = None")
+
+        single = f"{(bos+':0 ') if self.add_bos_token else ''}$A:0{(' '+eos+':0') if self.add_eos_token else ''}"
+        pair = f"{single}{(' '+bos+':1') if self.add_bos_token else ''} $B:1{(' '+eos+':1') if self.add_eos_token else ''}"
+
+        special_tokens = []
+        if self.add_bos_token:
+            special_tokens.append((bos, bos_token_id))
+        if self.add_eos_token:
+            special_tokens.append((eos, eos_token_id))
+        self._tokenizer.post_processor = processors.TemplateProcessing(
+            single=single, pair=pair, special_tokens=special_tokens
+        )
+
+    def get_special_tokens_mask(
+        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False
+    ) -> List[int]:
+        if already_has_special_tokens:
+            return super().get_special_tokens_mask(
+                token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True
+            )
+
+        bos_token_id = [1] if self.add_bos_token else []
+        eos_token_id = [1] if self.add_eos_token else []
+
+        if token_ids_1 is None:
+            return bos_token_id + ([0] * len(token_ids_0)) + eos_token_id
+        return (
+            bos_token_id
+            + ([0] * len(token_ids_0))
+            + eos_token_id
+            + bos_token_id
+            + ([0] * len(token_ids_1))
+            + eos_token_id
+        )
+
+    def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
+        bos_token_id = [self.bos_token_id] if self.add_bos_token else []
+        eos_token_id = [self.eos_token_id] if self.add_eos_token else []
+
+        output = bos_token_id + token_ids_0 + eos_token_id
+
+        if token_ids_1 is not None:
+            output = output + bos_token_id + token_ids_1 + eos_token_id
+
+        return output
+
+    def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]:
+        files = self._tokenizer.model.save(save_directory, name=filename_prefix)
+        return tuple(files)
+
+    @property
+    def default_chat_template(self):
         return "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"