danielhanchen committed on
Commit: 0a88658
Parent(s): 4955843

Upload tokenizer

Files changed (2)
  1. special_tokens_map.json +4 -0
  2. tokenizer_config.json +4 -0
special_tokens_map.json CHANGED
@@ -1,4 +1,8 @@
 {
+  "additional_special_tokens": [
+    "<start_of_turn>",
+    "<end_of_turn>"
+  ],
   "bos_token": {
     "content": "<bos>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -1995,6 +1995,10 @@
       "special": false
     }
   },
+  "additional_special_tokens": [
+    "<start_of_turn>",
+    "<end_of_turn>"
+  ],
   "bos_token": "<bos>",
   "chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<start_of_turn>' + role + '\n' + message['content'] | trim + '<end_of_turn>\n' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model\n'}}{% endif %}",
   "clean_up_tokenization_spaces": false,