Add files using upload-large-folder tool
Llama-3.1-8B-Instruct-UD-IQ1_M.gguf
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:9e05550ad1029a36b82ae68913c1e455deb80b18927cb02466379797ba4874bb
+size 2292204928
Llama-3.1-8B-Instruct-UD-IQ1_S.gguf
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:34683ed4a25956f2007991681c8769d85453552972b2090459fddad364be61d5
+size 2164671872
Llama-3.1-8B-Instruct-UD-IQ2_M.gguf
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:be6185fd092fb65cd9bb8fd8a77a5e3351d41b7e86aa2feb5f5585efa63ab0bc
+size 3003270528
Llama-3.1-8B-Instruct-UD-Q2_K_XL.gguf
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:5ab1da650de036e555f53eaf49c9d577d4dbefa6731beffecf66da2d0ec974a8
+size 3388769664
Llama-3.1-8B-Instruct-UD-Q4_K_XL.gguf
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:7d516ad921b6ebf5b990cee322961445074b0d3629d5bb6c8af84dd668662593
+size 4994205056
config.json
CHANGED
@@ -30,7 +30,7 @@
 "rope_theta": 500000.0,
 "tie_word_embeddings": false,
 "torch_dtype": "bfloat16",
-"transformers_version": "4.
+"transformers_version": "4.51.3",
 "unsloth_fixed": true,
 "use_cache": true,
 "vocab_size": 128256
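Each .gguf entry above is a Git LFS pointer, so the commit records only the blob's sha256 digest and byte size rather than the weights themselves. A quick integrity check after downloading is to compare the local file against those two values; the sketch below is a minimal example using the IQ1_M pointer from this commit (the local path is an assumption).

```python
import hashlib
import os

# Values recorded in the LFS pointer for Llama-3.1-8B-Instruct-UD-IQ1_M.gguf (see diff above).
EXPECTED_SHA256 = "9e05550ad1029a36b82ae68913c1e455deb80b18927cb02466379797ba4874bb"
EXPECTED_SIZE = 2292204928

path = "Llama-3.1-8B-Instruct-UD-IQ1_M.gguf"  # assumed local download path

# Cheap check first: the on-disk size must match the pointer's size field.
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

# Then hash the file in 1 MiB chunks and compare against the pointer's oid.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("pointer check passed:", path)
```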