danielhanchen committed (verified) · Commit 90bf564 · 1 parent: 6d032ae

Add files using upload-large-folder tool

Llama-3.1-8B-Instruct-UD-IQ1_M.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:68f3dc7e5dd0368abb7f2cae7c8d75d82efea7b8bb03f46f062de311f073e6b6
-size 2292200256
+oid sha256:9e05550ad1029a36b82ae68913c1e455deb80b18927cb02466379797ba4874bb
+size 2292204928
Llama-3.1-8B-Instruct-UD-IQ1_S.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:12c5f051b28ca477c090546021e294477491e140d1c5ea9a9838758db68741ad
-size 2164667200
+oid sha256:34683ed4a25956f2007991681c8769d85453552972b2090459fddad364be61d5
+size 2164671872
Llama-3.1-8B-Instruct-UD-IQ2_M.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:766990b2c129f86e6f522c82eba245acd11ec6aee4bbaea24ab2ac0cdb763b69
-size 2997302080
+oid sha256:be6185fd092fb65cd9bb8fd8a77a5e3351d41b7e86aa2feb5f5585efa63ab0bc
+size 3003270528
Llama-3.1-8B-Instruct-UD-Q2_K_XL.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e3b7cc5e3a76e664911bbefdbf69c07a6cde18a47c87219ca6d72b7b35079267
-size 3388764992
+oid sha256:5ab1da650de036e555f53eaf49c9d577d4dbefa6731beffecf66da2d0ec974a8
+size 3388769664
Llama-3.1-8B-Instruct-UD-Q4_K_XL.gguf CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cff2350cf5e3edc3c5f0f03bd0613cdbb1e6a0cfc80ea5d74b3eb32b5cf8838c
-size 4994200384
+oid sha256:7d516ad921b6ebf5b990cee322961445074b0d3629d5bb6c8af84dd668662593
+size 4994205056
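
Each hunk above rewrites only a Git LFS pointer: the oid sha256 and size lines describe the actual GGUF blob, so a previously downloaded file will no longer match the new pointers. Below is a minimal Python sketch for verifying a local download against the updated values; the hash and size are copied from the Q4_K_XL hunk, while the local filename/path is an assumption. The same check works for the other quants by swapping in the oid and size from their hunks.

import hashlib
import os

# Expected pointer values for the updated Q4_K_XL quant (taken from the hunk above).
EXPECTED_SHA256 = "7d516ad921b6ebf5b990cee322961445074b0d3629d5bb6c8af84dd668662593"
EXPECTED_SIZE = 4994205056
FILENAME = "Llama-3.1-8B-Instruct-UD-Q4_K_XL.gguf"  # assumed local path

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file through SHA-256 so multi-GB GGUFs never load into RAM.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

assert os.path.getsize(FILENAME) == EXPECTED_SIZE, "size mismatch: stale or truncated download"
assert sha256_of(FILENAME) == EXPECTED_SHA256, "hash mismatch: re-download the file"
print(FILENAME, "matches the updated LFS pointer")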
config.json CHANGED
@@ -30,7 +30,7 @@
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.52.0.dev0",
+  "transformers_version": "4.51.3",
   "unsloth_fixed": true,
   "use_cache": true,
   "vocab_size": 128256