{
  "visual_gen": true,
  "visual_und": true,
  "vit_max_num_patch_per_side": 70,
  "connector_act": "gelu_pytorch_tanh",
  "latent_patch_size": 2,
  "max_latent_size": 64,
  "llm_config_class": "Qwen2Config",
  "vit_config_class": "SiglipVisionConfig"
}
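
As a minimal sketch of how this config might be consumed (the file path, variable names, and the idea of a model wrapper around Qwen2Config / SiglipVisionConfig are assumptions here, not the repository's actual API), the file can be read with the standard json module and its fields inspected before building the model:

```python
import json

# Sketch only: the path and the interpretation of fields below are
# assumptions based on the key names, not the repo's documented API.
with open("config.json") as f:
    cfg = json.load(f)

# Boolean flags toggling the image-generation and image-understanding paths.
print(cfg["visual_gen"], cfg["visual_und"])          # True True

# Geometry fields: ViT patches per image side, latent patch size,
# and maximum latent grid size.
print(cfg["vit_max_num_patch_per_side"])             # 70
print(cfg["latent_patch_size"], cfg["max_latent_size"])  # 2 64

# Names of the Hugging Face config classes the sub-models are built from.
print(cfg["llm_config_class"], cfg["vit_config_class"])
# Qwen2Config SiglipVisionConfig
```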