{
"auto_map": {
"AutoConfig": "config.HATArchitectureConfig",
"AutoModelForCausalLM": "model.HATForCausalLM"
},
"backbone_config": {
"hidden_size": 4096,
"intermediate_size": 14336,
"max_position_embeddings": 32900,
"mlp_bias": false,
"num_attention_heads": 32,
"num_hidden_layers": 32,
"num_key_value_heads": 8,
"rms_norm_eps": 1e-05,
"rope_scaling": {
"factor": 8.0,
"high_freq_factor": 4.0,
"low_freq_factor": 1.0,
"original_max_position_embeddings": 8192,
"rope_type": "llama3"
},
"rope_theta": 500000,
"sliding_window": null,
"transformers_version": null,
"use_cache": true,
"vocab_size": 0
},
"decoder_config": {
"cross_attention_config": {
"attention_num_kv_heads": 8,
"hidden_size": 1024,
"hidden_size_kv": 4096,
"hidden_size_q": 1024,
"num_attention_heads": 8,
"word_window_size": 1
},
"cross_attn_every_layer": true,
"hidden_size": 1024,
"intermediate_size": 2816,
"max_position_embeddings": 262144,
"mlp_bias": false,
"num_attention_heads": 8,
"num_hidden_layers": 4,
"num_key_value_heads": 8,
"rms_norm_eps": 1e-05,
"rope_scaling": {
"rope_type": "default"
},
"rope_theta": 100000,
"sliding_window": 768,
"transformers_version": null,
"use_cache": true,
"vocab_size": 256
},
"encoder_config": {
"cross_attention_config": {
"attention_num_kv_heads": 32,
"hidden_size": 4096,
"hidden_size_kv": 1024,
"hidden_size_q": 4096,
"num_attention_heads": 32,
"word_window_size": 1
},
"hidden_size": 1024,
"intermediate_size": 2816,
"max_position_embeddings": 262144,
"mlp_bias": false,
"num_attention_heads": 8,
"num_hidden_layers": 6,
"num_key_value_heads": 8,
"rms_norm_eps": 1e-05,
"rope_scaling": {
"rope_type": "default"
},
"rope_theta": 100000,
"sliding_window": 768,
"transformers_version": null,
"use_cache": true,
"vocab_size": 256
},
"model_type": "hierarchical_autoregressive_transformer",
"special_token_dict": {
"<|eot_id|>": 192
},
"transformers_version": "4.46.3"
}
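
The `auto_map` entries route `AutoConfig` and `AutoModelForCausalLM` to the `config.py` and `model.py` modules shipped with this repository, so the checkpoint must be loaded with remote code enabled. A minimal loading sketch; the repository id below is a placeholder, not the actual Hub path:

```python
from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical placeholder; substitute the actual Hub path of this checkpoint.
repo_id = "path/to/this-hat-checkpoint"

# auto_map resolves these calls to config.HATArchitectureConfig and
# model.HATForCausalLM, so trust_remote_code=True is required.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype="auto",
)
print(type(model).__name__)  # expected: HATForCausalLM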
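
With `vocab_size: 256` in the encoder and decoder and `vocab_size: 0` in the backbone, the character-level modules appear to operate directly on UTF-8 bytes while the backbone consumes pooled embeddings rather than token ids; `<|eot_id|>` maps to id 192 (0xC0), a byte value that never occurs in valid UTF-8 text. A sketch of byte-level id construction under that assumption (the helper below is illustrative, not part of the repository's code):

```python
EOT_ID = 192  # from special_token_dict["<|eot_id|>"]

def to_byte_ids(text: str, add_eot: bool = True) -> list[int]:
    """Assumption: encoder/decoder ids are raw UTF-8 bytes (vocab size 256)."""
    ids = list(text.encode("utf-8"))
    if add_eot:
        ids.append(EOT_ID)  # 0xC0 is free, since it cannot appear in valid UTF-8
    return ids

print(to_byte_ids("Hello"))  # [72, 101, 108, 108, 111, 192]
```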
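
The backbone's `rope_scaling` block selects the `llama3` rule: low-frequency rotary components (wavelengths longer than the original 8192-token context) are divided by `factor`, high-frequency components are kept, and the band in between is interpolated. A rough sketch of that adjustment, written from the config values and mirroring how the `transformers` library handles `rope_type: "llama3"` (not copied from this repository's code):

```python
import math

import torch

# Values from backbone_config.rope_scaling / rope_theta.
factor = 8.0
low_freq_factor = 1.0
high_freq_factor = 4.0
original_max_position_embeddings = 8192
rope_theta = 500000.0
head_dim = 4096 // 32  # hidden_size / num_attention_heads = 128

# Base rotary inverse frequencies.
inv_freq = 1.0 / (rope_theta ** (torch.arange(0, head_dim, 2).float() / head_dim))

low_freq_wavelen = original_max_position_embeddings / low_freq_factor
high_freq_wavelen = original_max_position_embeddings / high_freq_factor
wavelen = 2 * math.pi / inv_freq

# Long wavelengths (low frequencies) are scaled down by `factor`;
# short wavelengths are left unchanged.
scaled = torch.where(wavelen > low_freq_wavelen, inv_freq / factor, inv_freq)

# The mid band is interpolated between the scaled and unscaled frequencies.
smooth = (original_max_position_embeddings / wavelen - low_freq_factor) / (
    high_freq_factor - low_freq_factor
)
smoothed = (1 - smooth) * inv_freq / factor + smooth * inv_freq
is_mid = (wavelen <= low_freq_wavelen) & (wavelen >= high_freq_wavelen)
inv_freq_scaled = torch.where(is_mid, smoothed, scaled)
```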