Fizzarolli
committed on
fix tokenizer
Browse files- tokenizer_config.json +1 -1
tokenizer_config.json
CHANGED
@@ -8023,7 +8023,7 @@
|
|
8023 |
"bos_token": "<s>",
|
8024 |
"chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
|
8025 |
"clean_up_tokenization_spaces": false,
|
8026 |
-
"eos_token": "
|
8027 |
"model_max_length": 1000000000000000019884624838656,
|
8028 |
"pad_token": "<pad>",
|
8029 |
"tokenizer_class": "PreTrainedTokenizerFast",
|
|
|
8023 |
"bos_token": "<s>",
|
8024 |
"chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
|
8025 |
"clean_up_tokenization_spaces": false,
|
8026 |
+
"eos_token": "<|im_end|>",
|
8027 |
"model_max_length": 1000000000000000019884624838656,
|
8028 |
"pad_token": "<pad>",
|
8029 |
"tokenizer_class": "PreTrainedTokenizerFast",
|