danielhanchen committed (verified)
Commit e954699 · 1 Parent(s): 9e7f59c

Add files using upload-large-folder tool

config.json CHANGED
@@ -50,13 +50,9 @@
   "visual.blocks.8.attn",
   "visual.blocks.8.mlp",
   "visual.blocks.9.attn",
-  "visual.blocks.9.mlp",
-  "visual.blocks.10.attn",
-  "visual.blocks.10.mlp",
   "visual.blocks.11.attn",
-  "visual.blocks.12.attn",
+  "visual.blocks.11.mlp",
   "visual.blocks.12.mlp",
-  "visual.blocks.13.attn",
   "visual.blocks.13.mlp",
   "visual.blocks.14.attn",
   "visual.blocks.14.mlp",
@@ -64,11 +60,9 @@
   "visual.blocks.15.mlp",
   "visual.blocks.16.mlp",
   "visual.blocks.17.mlp",
-  "visual.blocks.18.mlp",
   "visual.blocks.21.mlp",
   "model.layers.1.mlp",
-  "model.layers.2.mlp",
-  "visual.blocks.31.mlp.down_proj"
+  "visual.blocks.31.mlp"
   ],
   "llm_int8_threshold": 6.0,
   "load_in_4bit": true,
@@ -89,7 +83,7 @@
   "sliding_window": 32768,
   "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.49.0.dev0",
+  "transformers_version": "4.50.0.dev0",
   "unsloth_fixed": true,
   "use_cache": true,
   "use_sliding_window": false,
generation_config.json CHANGED
@@ -11,5 +11,5 @@
   "temperature": 0.1,
   "top_k": 1,
   "top_p": 0.001,
-  "transformers_version": "4.49.0.dev0"
+  "transformers_version": "4.50.0.dev0"
   }
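
Only the transformers_version stamp changes in generation_config.json; the decoding defaults are untouched. A minimal sketch, using the standard transformers GenerationConfig API, of the sampling settings this file pins:

# Sketch of the sampling defaults recorded in generation_config.json.
from transformers import GenerationConfig

gen_config = GenerationConfig(
    temperature=0.1,  # low temperature
    top_k=1,          # top_k=1 makes decoding effectively greedy
    top_p=0.001,
)
# model.generate(..., generation_config=gen_config) would pick these up as defaults.
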
model.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:1485b5e218a22fa049dbf50f2edb9507169919ed321a5a993da60b1cf04a74d6
-  size 3190402195
+  oid sha256:51a6325439277b0f7489ce22ba924841cf6a785675221d0b3df81f763e925d45
+  size 3034882794
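
The weights file is tracked with Git LFS, so only the pointer (SHA-256 digest and byte size) is versioned; the new blob is roughly 3.03 GB versus 3.19 GB before, consistent with more modules now being quantized. A small sketch, assuming model.safetensors has already been downloaded locally, for checking a copy against the new pointer:

# Sketch: a Git LFS pointer records only the SHA-256 and byte size of the real file,
# so a local download can be checked against the values committed here.
import hashlib
import os

path = "model.safetensors"  # assumed local path after download
expected_oid = "51a6325439277b0f7489ce22ba924841cf6a785675221d0b3df81f763e925d45"
expected_size = 3034882794

assert os.path.getsize(path) == expected_size, "size mismatch"
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
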
preprocessor_config.json CHANGED
@@ -8,7 +8,7 @@
   0.4578275,
   0.40821073
   ],
-  "image_processor_type": "Qwen2_5_VLImageProcessor",
+  "image_processor_type": "Qwen2VLImageProcessor",
   "image_std": [
   0.26862954,
   0.26130258,
@@ -22,8 +22,8 @@
   "resample": 3,
   "rescale_factor": 0.00392156862745098,
   "size": {
-  "max_pixels": 12845056,
-  "min_pixels": 3136
+  "longest_edge": 12845056,
+  "shortest_edge": 3136
   },
   "temporal_patch_size": 2
   }
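
The image_processor_type is renamed to Qwen2VLImageProcessor and the size keys switch from max_pixels/min_pixels to the longest_edge/shortest_edge names used by newer transformers releases; the numeric budgets themselves are unchanged. A rough sketch of what those budgets mean, assuming the usual Qwen2-VL convention that one merged vision token covers a 28x28-pixel tile (patch size 14 times spatial merge size 2):

# Sketch: translate the pixel budgets from preprocessor_config.json into vision-token
# counts, under the 28x28-pixels-per-token assumption stated above.
import json

with open("preprocessor_config.json") as f:  # assumes a local checkout of this repo
    size = json.load(f)["size"]

pixels_per_token = 28 * 28  # assumption: patch_size 14 x merge_size 2
print(size["shortest_edge"] // pixels_per_token)  # 3136 // 784 = 4 tokens minimum
print(size["longest_edge"] // pixels_per_token)   # 12845056 // 784 = 16384 tokens maximum
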
tokenizer_config.json CHANGED
@@ -200,7 +200,7 @@
   "eos_token": "<|im_end|>",
   "errors": "replace",
   "extra_special_tokens": {},
-  "model_max_length": 131072,
+  "model_max_length": 32768,
   "pad_token": "<|vision_pad|>",
   "padding_side": "left",
   "processor_class": "Qwen2_5_VLProcessor",