Upload tokenizer
Files changed:
- tokenizer.json +1 -1
- tokenizer_config.json +1 -1
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cf535eea1ed9374c2bc3436625216c18fc703fc37aae8e019b3e85cc658e0395
 size 11422174
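tokenizer.json is stored as a Git LFS pointer, so the diff only updates the pointer's sha256 oid and size. A minimal sketch for checking that a locally downloaded tokenizer.json matches this pointer; the file path is an assumption, and the expected values are copied from the new pointer above.

```python
import hashlib

# Path to the locally downloaded tokenizer.json (placeholder; adjust to your checkout).
path = "tokenizer.json"

# Values taken from the Git LFS pointer committed here.
expected_oid = "cf535eea1ed9374c2bc3436625216c18fc703fc37aae8e019b3e85cc658e0395"
expected_size = 11422174

digest = hashlib.sha256()
size = 0
with open(path, "rb") as f:
    # Hash the file in 1 MiB chunks to avoid loading ~11 MB into memory at once.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
        size += len(chunk)

print("sha256 matches:", digest.hexdigest() == expected_oid)
print("size matches:", size == expected_size)
```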
tokenizer_config.json CHANGED
@@ -200,7 +200,7 @@
   "eos_token": "<|im_end|>",
   "errors": "replace",
   "extra_special_tokens": {},
-  "max_length":
+  "max_length": 768,
   "model_max_length": 131072,
   "pad_to_multiple_of": null,
   "pad_token": "<|endoftext|>",
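The config change sets "max_length" to 768 while "model_max_length" stays at 131072. A minimal sketch of encoding with that 768-token cap using transformers; the repo id is a placeholder for whichever model repository this commit belongs to, and the cap is passed explicitly rather than relying on the config default.

```python
from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual model repository for this commit.
tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

text = "Example input that should be truncated to the configured length."

# Encode with an explicit 768-token cap, matching the "max_length" value added
# in this commit; "model_max_length" (131072) remains the tokenizer's hard limit.
encoded = tokenizer(text, truncation=True, max_length=768)
print(len(encoded["input_ids"]))
```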