shubhrapandit committed on
Commit
0d21073
·
1 Parent(s): 152b8d1

Update files to match base model keeping quant_config intact

Browse files
added_tokens.json DELETED
@@ -1,24 +0,0 @@
1
- {
2
- "</tool_call>": 151658,
3
- "<tool_call>": 151657,
4
- "<|box_end|>": 151649,
5
- "<|box_start|>": 151648,
6
- "<|endoftext|>": 151643,
7
- "<|file_sep|>": 151664,
8
- "<|fim_middle|>": 151660,
9
- "<|fim_pad|>": 151662,
10
- "<|fim_prefix|>": 151659,
11
- "<|fim_suffix|>": 151661,
12
- "<|im_end|>": 151645,
13
- "<|im_start|>": 151644,
14
- "<|image_pad|>": 151655,
15
- "<|object_ref_end|>": 151647,
16
- "<|object_ref_start|>": 151646,
17
- "<|quad_end|>": 151651,
18
- "<|quad_start|>": 151650,
19
- "<|repo_name|>": 151663,
20
- "<|video_pad|>": 151656,
21
- "<|vision_end|>": 151653,
22
- "<|vision_pad|>": 151654,
23
- "<|vision_start|>": 151652
24
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
chat_template.json CHANGED
@@ -1,3 +1,3 @@
1
  {
2
- "chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}"
3
- }
 
1
  {
2
+ "chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}"
3
+ }
config.json CHANGED
@@ -1,14 +1,17 @@
1
  {
2
- "_name_or_path": "Qwen/Qwen2.5-VL-72B-Instruct",
3
  "architectures": [
4
  "Qwen2_5_VLForConditionalGeneration"
5
  ],
6
  "attention_dropout": 0.0,
7
  "bos_token_id": 151643,
8
  "eos_token_id": 151645,
 
 
 
 
 
9
  "hidden_act": "silu",
10
  "hidden_size": 8192,
11
- "image_token_id": 151655,
12
  "initializer_range": 0.02,
13
  "intermediate_size": 29568,
14
  "max_position_embeddings": 128000,
@@ -17,239 +20,42 @@
17
  "num_attention_heads": 64,
18
  "num_hidden_layers": 80,
19
  "num_key_value_heads": 8,
20
- "quantization_config": {
21
- "config_groups": {
22
- "group_0": {
23
- "input_activations": {
24
- "actorder": null,
25
- "block_structure": null,
26
- "dynamic": true,
27
- "group_size": null,
28
- "num_bits": 8,
29
- "observer": null,
30
- "observer_kwargs": {},
31
- "strategy": "token",
32
- "symmetric": true,
33
- "type": "int"
34
- },
35
- "output_activations": null,
36
- "targets": [
37
- "Linear"
38
- ],
39
- "weights": {
40
- "actorder": null,
41
- "block_structure": null,
42
- "dynamic": false,
43
- "group_size": null,
44
- "num_bits": 8,
45
- "observer": "minmax",
46
- "observer_kwargs": {},
47
- "strategy": "channel",
48
- "symmetric": true,
49
- "type": "int"
50
- }
51
- }
52
- },
53
- "format": "int-quantized",
54
- "global_compression_ratio": 1.323305644571225,
55
- "ignore": [
56
- "visual.blocks.0.attn.qkv",
57
- "visual.blocks.0.attn.proj",
58
- "visual.blocks.0.mlp.gate_proj",
59
- "visual.blocks.0.mlp.up_proj",
60
- "visual.blocks.0.mlp.down_proj",
61
- "visual.blocks.1.attn.qkv",
62
- "visual.blocks.1.attn.proj",
63
- "visual.blocks.1.mlp.gate_proj",
64
- "visual.blocks.1.mlp.up_proj",
65
- "visual.blocks.1.mlp.down_proj",
66
- "visual.blocks.2.attn.qkv",
67
- "visual.blocks.2.attn.proj",
68
- "visual.blocks.2.mlp.gate_proj",
69
- "visual.blocks.2.mlp.up_proj",
70
- "visual.blocks.2.mlp.down_proj",
71
- "visual.blocks.3.attn.qkv",
72
- "visual.blocks.3.attn.proj",
73
- "visual.blocks.3.mlp.gate_proj",
74
- "visual.blocks.3.mlp.up_proj",
75
- "visual.blocks.3.mlp.down_proj",
76
- "visual.blocks.4.attn.qkv",
77
- "visual.blocks.4.attn.proj",
78
- "visual.blocks.4.mlp.gate_proj",
79
- "visual.blocks.4.mlp.up_proj",
80
- "visual.blocks.4.mlp.down_proj",
81
- "visual.blocks.5.attn.qkv",
82
- "visual.blocks.5.attn.proj",
83
- "visual.blocks.5.mlp.gate_proj",
84
- "visual.blocks.5.mlp.up_proj",
85
- "visual.blocks.5.mlp.down_proj",
86
- "visual.blocks.6.attn.qkv",
87
- "visual.blocks.6.attn.proj",
88
- "visual.blocks.6.mlp.gate_proj",
89
- "visual.blocks.6.mlp.up_proj",
90
- "visual.blocks.6.mlp.down_proj",
91
- "visual.blocks.7.attn.qkv",
92
- "visual.blocks.7.attn.proj",
93
- "visual.blocks.7.mlp.gate_proj",
94
- "visual.blocks.7.mlp.up_proj",
95
- "visual.blocks.7.mlp.down_proj",
96
- "visual.blocks.8.attn.qkv",
97
- "visual.blocks.8.attn.proj",
98
- "visual.blocks.8.mlp.gate_proj",
99
- "visual.blocks.8.mlp.up_proj",
100
- "visual.blocks.8.mlp.down_proj",
101
- "visual.blocks.9.attn.qkv",
102
- "visual.blocks.9.attn.proj",
103
- "visual.blocks.9.mlp.gate_proj",
104
- "visual.blocks.9.mlp.up_proj",
105
- "visual.blocks.9.mlp.down_proj",
106
- "visual.blocks.10.attn.qkv",
107
- "visual.blocks.10.attn.proj",
108
- "visual.blocks.10.mlp.gate_proj",
109
- "visual.blocks.10.mlp.up_proj",
110
- "visual.blocks.10.mlp.down_proj",
111
- "visual.blocks.11.attn.qkv",
112
- "visual.blocks.11.attn.proj",
113
- "visual.blocks.11.mlp.gate_proj",
114
- "visual.blocks.11.mlp.up_proj",
115
- "visual.blocks.11.mlp.down_proj",
116
- "visual.blocks.12.attn.qkv",
117
- "visual.blocks.12.attn.proj",
118
- "visual.blocks.12.mlp.gate_proj",
119
- "visual.blocks.12.mlp.up_proj",
120
- "visual.blocks.12.mlp.down_proj",
121
- "visual.blocks.13.attn.qkv",
122
- "visual.blocks.13.attn.proj",
123
- "visual.blocks.13.mlp.gate_proj",
124
- "visual.blocks.13.mlp.up_proj",
125
- "visual.blocks.13.mlp.down_proj",
126
- "visual.blocks.14.attn.qkv",
127
- "visual.blocks.14.attn.proj",
128
- "visual.blocks.14.mlp.gate_proj",
129
- "visual.blocks.14.mlp.up_proj",
130
- "visual.blocks.14.mlp.down_proj",
131
- "visual.blocks.15.attn.qkv",
132
- "visual.blocks.15.attn.proj",
133
- "visual.blocks.15.mlp.gate_proj",
134
- "visual.blocks.15.mlp.up_proj",
135
- "visual.blocks.15.mlp.down_proj",
136
- "visual.blocks.16.attn.qkv",
137
- "visual.blocks.16.attn.proj",
138
- "visual.blocks.16.mlp.gate_proj",
139
- "visual.blocks.16.mlp.up_proj",
140
- "visual.blocks.16.mlp.down_proj",
141
- "visual.blocks.17.attn.qkv",
142
- "visual.blocks.17.attn.proj",
143
- "visual.blocks.17.mlp.gate_proj",
144
- "visual.blocks.17.mlp.up_proj",
145
- "visual.blocks.17.mlp.down_proj",
146
- "visual.blocks.18.attn.qkv",
147
- "visual.blocks.18.attn.proj",
148
- "visual.blocks.18.mlp.gate_proj",
149
- "visual.blocks.18.mlp.up_proj",
150
- "visual.blocks.18.mlp.down_proj",
151
- "visual.blocks.19.attn.qkv",
152
- "visual.blocks.19.attn.proj",
153
- "visual.blocks.19.mlp.gate_proj",
154
- "visual.blocks.19.mlp.up_proj",
155
- "visual.blocks.19.mlp.down_proj",
156
- "visual.blocks.20.attn.qkv",
157
- "visual.blocks.20.attn.proj",
158
- "visual.blocks.20.mlp.gate_proj",
159
- "visual.blocks.20.mlp.up_proj",
160
- "visual.blocks.20.mlp.down_proj",
161
- "visual.blocks.21.attn.qkv",
162
- "visual.blocks.21.attn.proj",
163
- "visual.blocks.21.mlp.gate_proj",
164
- "visual.blocks.21.mlp.up_proj",
165
- "visual.blocks.21.mlp.down_proj",
166
- "visual.blocks.22.attn.qkv",
167
- "visual.blocks.22.attn.proj",
168
- "visual.blocks.22.mlp.gate_proj",
169
- "visual.blocks.22.mlp.up_proj",
170
- "visual.blocks.22.mlp.down_proj",
171
- "visual.blocks.23.attn.qkv",
172
- "visual.blocks.23.attn.proj",
173
- "visual.blocks.23.mlp.gate_proj",
174
- "visual.blocks.23.mlp.up_proj",
175
- "visual.blocks.23.mlp.down_proj",
176
- "visual.blocks.24.attn.qkv",
177
- "visual.blocks.24.attn.proj",
178
- "visual.blocks.24.mlp.gate_proj",
179
- "visual.blocks.24.mlp.up_proj",
180
- "visual.blocks.24.mlp.down_proj",
181
- "visual.blocks.25.attn.qkv",
182
- "visual.blocks.25.attn.proj",
183
- "visual.blocks.25.mlp.gate_proj",
184
- "visual.blocks.25.mlp.up_proj",
185
- "visual.blocks.25.mlp.down_proj",
186
- "visual.blocks.26.attn.qkv",
187
- "visual.blocks.26.attn.proj",
188
- "visual.blocks.26.mlp.gate_proj",
189
- "visual.blocks.26.mlp.up_proj",
190
- "visual.blocks.26.mlp.down_proj",
191
- "visual.blocks.27.attn.qkv",
192
- "visual.blocks.27.attn.proj",
193
- "visual.blocks.27.mlp.gate_proj",
194
- "visual.blocks.27.mlp.up_proj",
195
- "visual.blocks.27.mlp.down_proj",
196
- "visual.blocks.28.attn.qkv",
197
- "visual.blocks.28.attn.proj",
198
- "visual.blocks.28.mlp.gate_proj",
199
- "visual.blocks.28.mlp.up_proj",
200
- "visual.blocks.28.mlp.down_proj",
201
- "visual.blocks.29.attn.qkv",
202
- "visual.blocks.29.attn.proj",
203
- "visual.blocks.29.mlp.gate_proj",
204
- "visual.blocks.29.mlp.up_proj",
205
- "visual.blocks.29.mlp.down_proj",
206
- "visual.blocks.30.attn.qkv",
207
- "visual.blocks.30.attn.proj",
208
- "visual.blocks.30.mlp.gate_proj",
209
- "visual.blocks.30.mlp.up_proj",
210
- "visual.blocks.30.mlp.down_proj",
211
- "visual.blocks.31.attn.qkv",
212
- "visual.blocks.31.attn.proj",
213
- "visual.blocks.31.mlp.gate_proj",
214
- "visual.blocks.31.mlp.up_proj",
215
- "visual.blocks.31.mlp.down_proj",
216
- "visual.merger.mlp.0",
217
- "visual.merger.mlp.2",
218
- "lm_head"
219
- ],
220
- "kv_cache_scheme": null,
221
- "quant_method": "compressed-tensors",
222
- "quantization_status": "compressed"
223
- },
224
  "rms_norm_eps": 1e-06,
225
- "rope_scaling": {
226
- "mrope_section": [
227
- 16,
228
- 24,
229
- 24
230
- ],
231
- "rope_type": "default",
232
- "type": "default"
233
- },
234
  "rope_theta": 1000000.0,
235
  "sliding_window": 32768,
236
  "tie_word_embeddings": false,
237
  "torch_dtype": "bfloat16",
238
- "transformers_version": "4.49.0.dev0",
239
  "use_cache": true,
240
  "use_sliding_window": false,
241
- "video_token_id": 151656,
242
  "vision_config": {
 
 
243
  "hidden_size": 1280,
244
- "in_chans": 3,
245
  "intermediate_size": 3456,
246
- "model_type": "qwen2_5_vl",
 
247
  "out_hidden_size": 8192,
 
 
248
  "spatial_patch_size": 14,
249
- "tokens_per_second": 2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
250
  },
251
- "vision_end_token_id": 151653,
252
- "vision_start_token_id": 151652,
253
- "vision_token_id": 151654,
254
  "vocab_size": 152064
255
  }
 
1
  {
 
2
  "architectures": [
3
  "Qwen2_5_VLForConditionalGeneration"
4
  ],
5
  "attention_dropout": 0.0,
6
  "bos_token_id": 151643,
7
  "eos_token_id": 151645,
8
+ "vision_start_token_id": 151652,
9
+ "vision_end_token_id": 151653,
10
+ "vision_token_id": 151654,
11
+ "image_token_id": 151655,
12
+ "video_token_id": 151656,
13
  "hidden_act": "silu",
14
  "hidden_size": 8192,
 
15
  "initializer_range": 0.02,
16
  "intermediate_size": 29568,
17
  "max_position_embeddings": 128000,
 
20
  "num_attention_heads": 64,
21
  "num_hidden_layers": 80,
22
  "num_key_value_heads": 8,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
  "rms_norm_eps": 1e-06,
 
 
 
 
 
 
 
 
 
24
  "rope_theta": 1000000.0,
25
  "sliding_window": 32768,
26
  "tie_word_embeddings": false,
27
  "torch_dtype": "bfloat16",
28
+ "transformers_version": "4.41.2",
29
  "use_cache": true,
30
  "use_sliding_window": false,
 
31
  "vision_config": {
32
+ "depth": 32,
33
+ "hidden_act": "silu",
34
  "hidden_size": 1280,
 
35
  "intermediate_size": 3456,
36
+ "num_heads": 16,
37
+ "in_chans": 3,
38
  "out_hidden_size": 8192,
39
+ "patch_size": 14,
40
+ "spatial_merge_size": 2,
41
  "spatial_patch_size": 14,
42
+ "window_size": 112,
43
+ "fullatt_block_indexes": [
44
+ 7,
45
+ 15,
46
+ 23,
47
+ 31
48
+ ],
49
+ "tokens_per_second": 2,
50
+ "temporal_patch_size": 2
51
+ },
52
+ "rope_scaling": {
53
+ "type": "mrope",
54
+ "mrope_section": [
55
+ 16,
56
+ 24,
57
+ 24
58
+ ]
59
  },
 
 
 
60
  "vocab_size": 152064
61
  }
generation_config.json CHANGED
@@ -1,13 +1,12 @@
1
  {
2
  "bos_token_id": 151643,
 
3
  "do_sample": true,
4
  "eos_token_id": [
5
  151645,
6
  151643
7
  ],
8
- "pad_token_id": 151643,
9
  "repetition_penalty": 1.05,
10
- "top_k": 1,
11
- "top_p": 0.001,
12
- "transformers_version": "4.49.0.dev0"
13
- }
 
1
  {
2
  "bos_token_id": 151643,
3
+ "pad_token_id": 151643,
4
  "do_sample": true,
5
  "eos_token_id": [
6
  151645,
7
  151643
8
  ],
 
9
  "repetition_penalty": 1.05,
10
+ "temperature": 0.000001,
11
+ "transformers_version": "4.49.0"
12
+ }
 
merges.txt CHANGED
@@ -1,4 +1,3 @@
1
- #version: 0.2
2
  Ġ Ġ
3
  ĠĠ ĠĠ
4
  i n
 
 
1
  Ġ Ġ
2
  ĠĠ ĠĠ
3
  i n
preprocessor_config.json CHANGED
@@ -1,8 +1,9 @@
1
  {
2
- "do_convert_rgb": true,
3
- "do_normalize": true,
4
- "do_rescale": true,
5
- "do_resize": true,
 
6
  "image_mean": [
7
  0.48145466,
8
  0.4578275,
@@ -13,17 +14,6 @@
13
  0.26130258,
14
  0.27577711
15
  ],
16
- "max_pixels": 12845056,
17
- "merge_size": 2,
18
- "min_pixels": 3136,
19
- "patch_size": 14,
20
  "image_processor_type": "Qwen2VLImageProcessor",
21
- "processor_class": "Qwen2_5_VLProcessor",
22
- "resample": 3,
23
- "rescale_factor": 0.00392156862745098,
24
- "size": {
25
- "longest_edge": 12845056,
26
- "shortest_edge": 3136
27
- },
28
- "temporal_patch_size": 2
29
- }
 
1
  {
2
+ "min_pixels": 3136,
3
+ "max_pixels": 12845056,
4
+ "patch_size": 14,
5
+ "temporal_patch_size": 2,
6
+ "merge_size": 2,
7
  "image_mean": [
8
  0.48145466,
9
  0.4578275,
 
14
  0.26130258,
15
  0.27577711
16
  ],
 
 
 
 
17
  "image_processor_type": "Qwen2VLImageProcessor",
18
+ "processor_class": "Qwen2_5_VLProcessor"
19
+ }
 
 
 
 
 
 
 
special_tokens_map.json DELETED
@@ -1,31 +0,0 @@
1
- {
2
- "additional_special_tokens": [
3
- "<|im_start|>",
4
- "<|im_end|>",
5
- "<|object_ref_start|>",
6
- "<|object_ref_end|>",
7
- "<|box_start|>",
8
- "<|box_end|>",
9
- "<|quad_start|>",
10
- "<|quad_end|>",
11
- "<|vision_start|>",
12
- "<|vision_end|>",
13
- "<|vision_pad|>",
14
- "<|image_pad|>",
15
- "<|video_pad|>"
16
- ],
17
- "eos_token": {
18
- "content": "<|im_end|>",
19
- "lstrip": false,
20
- "normalized": false,
21
- "rstrip": false,
22
- "single_word": false
23
- },
24
- "pad_token": {
25
- "content": "<|endoftext|>",
26
- "lstrip": false,
27
- "normalized": false,
28
- "rstrip": false,
29
- "single_word": false
30
- }
31
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:913950e4971737031da511cdd1b410daae4566f62eb845b3975bca5a102323d8
3
- size 11421995
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c0382117ea329cdf097041132f6d735924b697924d6f6fc3945713e96ce87539
3
+ size 7031645
tokenizer_config.json CHANGED
@@ -1,5 +1,4 @@
1
  {
2
- "add_bos_token": false,
3
  "add_prefix_space": false,
4
  "added_tokens_decoder": {
5
  "151643": {
@@ -195,15 +194,14 @@
195
  "<|video_pad|>"
196
  ],
197
  "bos_token": null,
198
- "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content 
}}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
199
  "clean_up_tokenization_spaces": false,
200
  "eos_token": "<|im_end|>",
201
  "errors": "replace",
202
- "extra_special_tokens": {},
203
  "model_max_length": 131072,
204
  "pad_token": "<|endoftext|>",
205
- "processor_class": "Qwen2_5_VLProcessor",
206
  "split_special_tokens": false,
207
  "tokenizer_class": "Qwen2Tokenizer",
208
- "unk_token": null
 
209
  }
 
1
  {
 
2
  "add_prefix_space": false,
3
  "added_tokens_decoder": {
4
  "151643": {
 
194
  "<|video_pad|>"
195
  ],
196
  "bos_token": null,
197
+ "chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}",
198
  "clean_up_tokenization_spaces": false,
199
  "eos_token": "<|im_end|>",
200
  "errors": "replace",
 
201
  "model_max_length": 131072,
202
  "pad_token": "<|endoftext|>",
 
203
  "split_special_tokens": false,
204
  "tokenizer_class": "Qwen2Tokenizer",
205
+ "unk_token": null,
206
+ "add_bos_token": false
207
  }