Phi-4-mini-reasoning-unsloth-bnb-4bit / tokenizer_config.json
{
"add_bos_token": false,
"add_eos_token": false,
"add_prefix_space": false,
"added_tokens_decoder": {
"3251": {
"content": "\u00ef\u00bf\u00bd",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"199999": {
"content": "<|endoftext|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"200018": {
"content": "<|endofprompt|>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"200019": {
"content": "<|assistant|>",
"lstrip": false,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": true
},
"200020": {
"content": "<|end|>",
"lstrip": false,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": true
},
"200021": {
"content": "<|user|>",
"lstrip": false,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": true
},
"200022": {
"content": "<|system|>",
"lstrip": false,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": true
},
"200023": {
"content": "<|tool|>",
"lstrip": false,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": false
},
"200024": {
"content": "<|/tool|>",
"lstrip": false,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": false
},
"200025": {
"content": "<|tool_call|>",
"lstrip": false,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": false
},
"200026": {
"content": "<|/tool_call|>",
"lstrip": false,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": false
},
"200027": {
"content": "<|tool_response|>",
"lstrip": false,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": false
},
"200028": {
"content": "<|tag|>",
"lstrip": false,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": true
}
},
"bos_token": "<|endoftext|>",
"clean_up_tokenization_spaces": false,
"eos_token": "<|endoftext|>",
"extra_special_tokens": {},
"max_length": 1024,
"model_max_length": 131072,
"pad_token": "<|endofprompt|>",
"padding_side": "left",
"stride": 0,
"tokenizer_class": "GPT2Tokenizer",
"truncation_side": "right",
"truncation_strategy": "longest_first",
"unk_token": "\u00ef\u00bf\u00bd",
"chat_template": "{{ '<|system|>Your name is Phi, an AI math expert developed by Microsoft.' }}{% for message in messages %}{% if message['role'] == 'system' %} {{ message['content'] }}{% if 'tools' in message and message['tools'] is not none %}{{ '<|tool|>' + message['tools'] + '<|/tool|>' }}{% endif %}{% endif %}{% endfor %}{{ '<|end|>' }}{% for message in messages %}{% if message['role'] != 'system' %}{{ '<|' + message['role'] + '|>' + message['content'] + '<|end|>' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>' }}{% else %}{{ eos_token }}{% endif %}"
}
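
For reference, a minimal sketch of how the `chat_template` above renders a conversation, assuming the `transformers` library is installed and that this file belongs to the `unsloth/Phi-4-mini-reasoning-unsloth-bnb-4bit` repo on the Hub (swap in a local path if you have the files checked out):

```python
# Minimal sketch: load this tokenizer config and render a conversation with
# the chat_template defined above. The repo id is an assumption; adjust it
# or point from_pretrained() at a local directory containing these files.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "unsloth/Phi-4-mini-reasoning-unsloth-bnb-4bit"
)

messages = [
    {"role": "user", "content": "Solve 2x + 3 = 11."},
]

# add_generation_prompt=True makes the template append "<|assistant|>" so the
# model continues as the assistant; otherwise it ends with the eos_token.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# Expected shape per the template (a single continuous string, wrapped here):
# <|system|>Your name is Phi, an AI math expert developed by Microsoft.<|end|>
# <|user|>Solve 2x + 3 = 11.<|end|><|assistant|>
```

Note that the hard-coded system preamble is always emitted first; any `system` messages in the list are appended to it before the closing `<|end|>`, and non-system messages are wrapped as `<|role|>...<|end|>`.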