Update config.json
config.json  +24 -51  CHANGED
@@ -1,52 +1,25 @@
 {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          "name":"DiscreteMamba2",
-          "input":{
-            "d_state":64,
-            "n_qk_heads":32,
-            "n_v_heads":32,
-            "expand":1,
-            "chunk_size":128,
-            "activation":"identity",
-            "use_ref_impl":false,
-            "bias":false,
-            "norm_cls":"none",
-            "initializer":{
-              "a_log":"default",
-              "x":"default",
-              "B":"default",
-              "C":"default",
-              "D":"default",
-              "z":"identity",
-              "out":"default",
-              "convolution":"identity"
-            }
-          }
-        }
-      }
-    ]
-  }
-}
+  "model_type": "llamba",
+  "vocab_size": 128256,
+  "tie_embeddings": true,
+  "pad_vocab_size_multiple": 8,
+  "lm_head_bias": false,
+  "d_model": 2048,
+  "n_layer": 32,
+  "resid_dropout": 0.0,
+  "norm_epsilon": 1e-5,
+  "mlp_cfg": {
+    "intermediate_size": 14336,
+    "bias": false,
+    "act_fn": "silu"
+  },
+  "ssm_cfg": {
+    "d_state": 64,
+    "n_v_heads": 32,
+    "n_qk_heads": 32,
+    "expand": 1,
+    "chunk_size": 128,
+    "activation": "identity",
+    "bias": false
+  }
+}
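For reference, a minimal sketch of reading the new config and deriving the SSM head geometry from it. The relations d_inner = expand * d_model and headdim = d_inner / n_v_heads follow the usual Mamba-2 conventions and are assumptions here, not code from this repo; the local config.json path is likewise assumed.

import json

# Read the updated config (assumes a local copy of config.json).
with open("config.json") as f:
    cfg = json.load(f)

ssm = cfg["ssm_cfg"]

# Derived sizes, assuming standard Mamba-2 conventions (an assumption,
# not this repo's code): the mixer operates on d_inner = expand * d_model
# channels, split evenly across n_v_heads value heads.
d_inner = ssm["expand"] * cfg["d_model"]   # 1 * 2048 = 2048
headdim = d_inner // ssm["n_v_heads"]      # 2048 // 32 = 64

print(f"d_inner={d_inner} headdim={headdim} d_state={ssm['d_state']}")
# -> d_inner=2048 headdim=64 d_state=64

Under these conventions, expand=1 keeps the mixer at the model width, and each of the 32 value heads gets a 64-dimensional slice, matching d_state=64.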