danielhanchen committed
Commit 3120b83 · verified · 1 Parent(s): bd2fa41

Add files using upload-large-folder tool

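The commit message names Hugging Face's upload-large-folder tool. A minimal sketch of the equivalent call through huggingface_hub (the repo id and local folder path are illustrative assumptions, not taken from this commit):

```python
# Sketch only: how an upload like this one is typically performed with
# huggingface_hub's upload_large_folder API. repo_id and folder_path
# below are assumed values for illustration.
from huggingface_hub import HfApi

api = HfApi()  # expects a token via `huggingface-cli login` or HF_TOKEN
api.upload_large_folder(
    repo_id="unsloth/Qwen3-30B-A3B-GGUF",  # assumption: target repo
    folder_path="./Qwen3-30B-A3B-GGUF",    # assumption: local folder
    repo_type="model",
)
```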
Qwen3-30B-A3B-UD-IQ1_S.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2e00680c1397c1e47f87f4c7c8ce74cb96967f24d4b9138909ebf5e53daa79fa
-size 9021280032
+oid sha256:9fcab6086b5f5501202cd1ce5966883901e9ee8dcf7c0abb7fc8fa5108679b66
+size 9021281024
Qwen3-30B-A3B-UD-IQ2_M.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:05fbcb913f3f39100a22991e164af72307b350ec9940e82103771fbc2917970d
-size 10865577760
+oid sha256:e83ec59ac7e61b56645f8a9295a7d4db0bd69626bf135841a140f4f49824ddc0
+size 10865578752
Qwen3-30B-A3B-UD-Q2_K_XL.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:12d122cad286c90008da1734ed02959d5a846c6f5f5cbc0f1f3f6786c82ba574
-size 11814276896
+oid sha256:fb9c0746f06ab6303039a5e9da4d271a9423cdeb7663ba92bdb0152004ff9fbf
+size 11814277888
Qwen3-30B-A3B-UD-Q4_K_XL.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1f06c359e1d6ee8cc7acc9d45b7e68e94f31dad9bca19422d96747ae11442469
-size 17715662624
+oid sha256:a6ad38467286a0f40ae6e2e46eda187615b78e4fda647dd9d0d3e21d2289cc57
+size 17715663616
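Each .gguf entry above is a Git LFS pointer recording the blob's sha256 oid and byte size. A minimal sketch for checking a downloaded file against its updated pointer, using the Q4_K_XL values from this commit (the local filename is an assumption):

```python
# Sketch: verify a downloaded GGUF against its Git LFS pointer.
# expected_oid and expected_size come from the new pointer above;
# the local path is an assumed value.
import hashlib
import os

path = "Qwen3-30B-A3B-UD-Q4_K_XL.gguf"  # assumption: local download
expected_oid = "a6ad38467286a0f40ae6e2e46eda187615b78e4fda647dd9d0d3e21d2289cc57"
expected_size = 17715663616

# Cheap check first: the byte size must match the pointer.
assert os.path.getsize(path) == expected_size, "size mismatch"

# Then hash the file in 1 MiB chunks and compare against the oid.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("OK: file matches its LFS pointer")
```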
config.json CHANGED
@@ -4,13 +4,14 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
+  "bos_token_id": 151643,
   "decoder_sparse_step": 1,
   "eos_token_id": 151645,
   "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 2048,
   "initializer_range": 0.02,
-  "intermediate_size": 8192,
+  "intermediate_size": 6144,
   "max_position_embeddings": 40960,
   "max_window_layers": 48,
   "mlp_only_layers": [],
@@ -23,7 +24,6 @@
   "num_hidden_layers": 48,
   "num_key_value_heads": 4,
   "output_router_logits": false,
-  "pad_token_id": 151654,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 1000000.0,
@@ -31,9 +31,8 @@
   "sliding_window": null,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.52.0.dev0",
-  "unsloth_fixed": true,
+  "transformers_version": "4.51.0",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 151936
-}
+}
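The config.json change adds bos_token_id, changes intermediate_size from 8192 to 6144, drops pad_token_id and unsloth_fixed, and moves transformers_version from 4.52.0.dev0 to 4.51.0. A minimal sketch that surfaces these key-level changes by diffing the two revisions (the file paths are illustrative assumptions):

```python
# Sketch: report key-level differences between the old and new
# config.json. The two paths are assumed local copies of each revision.
import json

with open("config.old.json") as f:  # assumption: pre-commit revision
    old = json.load(f)
with open("config.json") as f:      # assumption: post-commit revision
    new = json.load(f)

for key in sorted(old.keys() | new.keys()):
    if old.get(key) != new.get(key):
        print(f"{key}: {old.get(key)!r} -> {new.get(key)!r}")

# Expected output for this commit:
#   bos_token_id: None -> 151643
#   intermediate_size: 8192 -> 6144
#   pad_token_id: 151654 -> None
#   transformers_version: '4.52.0.dev0' -> '4.51.0'
#   unsloth_fixed: True -> None
```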