mobicham committed on
Commit 0573dab · verified · 1 Parent(s): 3fa56f0

Update config.json

Files changed (1)
  1. config.json +26 -70
config.json CHANGED
@@ -5,9 +5,13 @@
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
   "eos_token_id": 151645,
+  "vision_start_token_id": 151652,
+  "vision_end_token_id": 151653,
+  "vision_token_id": 151654,
+  "image_token_id": 151655,
+  "video_token_id": 151656,
   "hidden_act": "silu",
   "hidden_size": 2048,
-  "image_token_id": 151655,
   "initializer_range": 0.02,
   "intermediate_size": 11008,
   "max_position_embeddings": 128000,
@@ -39,89 +43,41 @@
   ]
   },
   "rms_norm_eps": 1e-06,
-  "rope_scaling": {
-    "mrope_section": [
-      16,
-      24,
-      24
-    ],
-    "rope_type": "default",
-    "type": "default"
-  },
   "rope_theta": 1000000.0,
   "sliding_window": 32768,
-  "text_config": {
-    "architectures": [
-      "Qwen2_5_VLForConditionalGeneration"
-    ],
-    "attention_dropout": 0.0,
-    "bos_token_id": 151643,
-    "eos_token_id": 151645,
-    "hidden_act": "silu",
-    "hidden_size": 2048,
-    "image_token_id": null,
-    "initializer_range": 0.02,
-    "intermediate_size": 11008,
-    "max_position_embeddings": 128000,
-    "max_window_layers": 70,
-    "model_type": "qwen2_5_vl_text",
-    "num_attention_heads": 16,
-    "num_hidden_layers": 36,
-    "num_key_value_heads": 2,
-    "rms_norm_eps": 1e-06,
-    "rope_scaling": {
-      "mrope_section": [
-        16,
-        24,
-        24
-      ],
-      "rope_type": "default",
-      "type": "default"
-    },
-    "rope_theta": 1000000.0,
-    "sliding_window": 32768,
-    "tie_word_embeddings": true,
-    "torch_dtype": "float16",
-    "use_cache": true,
-    "use_sliding_window": false,
-    "video_token_id": null,
-    "vision_end_token_id": 151653,
-    "vision_start_token_id": 151652,
-    "vision_token_id": 151654,
-    "vocab_size": 151936
-  },
-  "torch_dtype": "float16",
-  "transformers_version": "4.52.0.dev0",
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.41.2",
   "use_cache": true,
   "use_sliding_window": false,
-  "video_token_id": 151656,
   "vision_config": {
     "depth": 32,
-    "fullatt_block_indexes": [
-      7,
-      15,
-      23,
-      31
-    ],
     "hidden_act": "silu",
     "hidden_size": 1280,
-    "in_channels": 3,
-    "in_chans": 3,
-    "initializer_range": 0.02,
     "intermediate_size": 3420,
-    "model_type": "qwen2_5_vl",
     "num_heads": 16,
+    "in_chans": 3,
     "out_hidden_size": 2048,
     "patch_size": 14,
     "spatial_merge_size": 2,
     "spatial_patch_size": 14,
-    "temporal_patch_size": 2,
+    "window_size": 112,
+    "fullatt_block_indexes": [
+      7,
+      15,
+      23,
+      31
+    ],
     "tokens_per_second": 2,
-    "torch_dtype": "float16",
-    "window_size": 112
+    "temporal_patch_size": 2
+  },
+  "rope_scaling": {
+    "type": "mrope",
+    "mrope_section": [
+      16,
+      24,
+      24
+    ]
   },
-  "vision_end_token_id": 151653,
-  "vision_start_token_id": 151652,
-  "vision_token_id": 151654,
   "vocab_size": 151936
-}
+}
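
The edit flattens config.json back to the single-level layout: the special vision/image/video token ids, rope_scaling (now "type": "mrope"), tie_word_embeddings, torch_dtype, and transformers_version sit at the top level, the nested text_config block from the previous revision is removed, and the windowed-attention fields (window_size, fullatt_block_indexes) move inside vision_config. Below is a minimal sketch to sanity-check the new layout, assuming the full config.json produced by this commit is read from the current directory (the path is hypothetical); it only checks keys visible in the diff above.

# Minimal sketch: verify the flattened layout this commit restores.
import json

with open("config.json") as f:
    cfg = json.load(f)

# Special-token ids restored at the top level
for key in ("vision_start_token_id", "vision_end_token_id",
            "vision_token_id", "image_token_id", "video_token_id"):
    print(key, "->", cfg[key])

# rope_scaling moved back to the flat, top-level mrope form
assert cfg["rope_scaling"]["type"] == "mrope"
assert cfg["rope_scaling"]["mrope_section"] == [16, 24, 24]

# Windowed-attention fields now live inside vision_config
print(cfg["vision_config"]["window_size"])            # 112
print(cfg["vision_config"]["fullatt_block_indexes"])  # [7, 15, 23, 31]

# The nested text_config block from the previous revision is gone
assert "text_config" not in cfg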