Update tokenizer_config.json
Browse files- tokenizer_config.json +1 -0
tokenizer_config.json
CHANGED
@@ -2005,6 +2005,7 @@
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<pad>",
   "sp_model_kwargs": {},
+  "chat_template": "{% for message in messages %}{% if message['role'] == 'system' and message['content'] %}{{'<|system|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'user' %}{{'<|user|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|assistant|>\n' + message['content'] + '<|end|>\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% else %}{{ eos_token }}{% endif %}",
   "spaces_between_special_tokens": false,
   "tokenizer_class": "GemmaTokenizer",
   "unk_token": "<unk>",