dazednaut committed on
Commit
2321e21
1 Parent(s): a69a6d4

Upload 2 files

Browse files
Files changed (1) hide show
  1. tokenizer_config.json +5 -13
tokenizer_config.json CHANGED
@@ -11,7 +11,7 @@
11
  "special": true
12
  },
13
  "1": {
14
- "content": "<eos>",
15
  "lstrip": false,
16
  "normalized": false,
17
  "rstrip": false,
@@ -851,7 +851,7 @@
851
  "special": false
852
  },
853
  "106": {
854
- "content": "<start_of_turn>",
855
  "lstrip": false,
856
  "normalized": false,
857
  "rstrip": false,
@@ -1995,20 +1995,12 @@
1995
  "special": false
1996
  }
1997
  },
1998
- "additional_special_tokens": [
1999
- "<start_of_turn>",
2000
- "<end_of_turn>"
2001
- ],
2002
  "bos_token": "<bos>",
2003
- "chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<start_of_turn>' + role + '\n' + message['content'] | trim + '<end_of_turn>\n' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model\n'}}{% endif %}",
2004
  "clean_up_tokenization_spaces": false,
2005
- "eos_token": "<eos>",
2006
  "model_max_length": 1000000000000000019884624838656,
2007
  "pad_token": "<pad>",
2008
- "padding_side": "left",
2009
- "sp_model_kwargs": {},
2010
- "spaces_between_special_tokens": false,
2011
  "tokenizer_class": "GemmaTokenizer",
2012
- "unk_token": "<unk>",
2013
- "use_default_system_prompt": false
2014
  }
 
11
  "special": true
12
  },
13
  "1": {
14
+ "content": "<|im_end|>",
15
  "lstrip": false,
16
  "normalized": false,
17
  "rstrip": false,
 
851
  "special": false
852
  },
853
  "106": {
854
+ "content": "<|im_start|>",
855
  "lstrip": false,
856
  "normalized": false,
857
  "rstrip": false,
 
1995
  "special": false
1996
  }
1997
  },
 
 
 
 
1998
  "bos_token": "<bos>",
1999
+ "chat_template": "{% if 'role' in messages[0] %}{{ bos_token }}{% for message in messages %}{% if message['role'] == 'user' %}{{'<|im_start|>user\n' + message['content'] + '<|im_end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|im_start|>assistant\n' + message['content'] + '<|im_end|>\n' }}{% else %}{{ '<|im_start|>system\n' + message['content'] + '<|im_end|>\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}{% else %}{{ bos_token }}{% for message in messages %}{% if message['from'] == 'human' %}{{'<|im_start|>user\n' + message['value'] + '<|im_end|>\n'}}{% elif message['from'] == 'gpt' %}{{'<|im_start|>assistant\n' + message['value'] + '<|im_end|>\n' }}{% else %}{{ '<|im_start|>system\n' + message['value'] + '<|im_end|>\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}{% endif %}",
2000
  "clean_up_tokenization_spaces": false,
2001
+ "eos_token": "<|im_end|>",
2002
  "model_max_length": 1000000000000000019884624838656,
2003
  "pad_token": "<pad>",
 
 
 
2004
  "tokenizer_class": "GemmaTokenizer",
2005
+ "unk_token": "<unk>"
 
2006
  }