Upload tokenizer
tokenizer_config.json +3 -0
tokenizer_config.json
CHANGED
@@ -186,8 +186,11 @@
   "eos_token": "<|end▁of▁sentence|>",
   "extra_special_tokens": {},
   "legacy": true,
+  "max_length": null,
   "model_max_length": 131072,
+  "pad_to_multiple_of": null,
   "pad_token": "<|vision_pad|>",
+  "pad_token_type_id": 0,
   "padding_side": "left",
   "sp_model_kwargs": {},
   "tokenizer_class": "LlamaTokenizer",
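The three added keys (max_length, pad_to_multiple_of, pad_token_type_id) are standard padding-related fields that transformers serializes into tokenizer_config.json when a tokenizer is saved with padding defaults. Below is a minimal sketch of how these fields surface after loading the tokenizer with the Hugging Face transformers library; the repo id "your-org/your-model" is a placeholder, not this repository's actual name.

# Minimal sketch (assumption): load this tokenizer and inspect the
# padding-related fields touched by this commit. The repo id is a placeholder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-model")

# Values from tokenizer_config.json become tokenizer attributes.
print(tok.model_max_length)   # 131072
print(tok.pad_token)          # "<|vision_pad|>"
print(tok.padding_side)       # "left"
print(tok.pad_token_type_id)  # 0

# With padding_side="left", shorter sequences are padded on the left,
# the usual choice for decoder-only generation.
batch = tok(["hello", "a longer example sentence"],
            padding=True, return_tensors="pt")
print(batch["input_ids"].shape)

Setting "max_length" and "pad_to_multiple_of" to null simply leaves those padding defaults unset, so per-call arguments (or model_max_length) govern truncation and padding length.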