WyettZ committed on
Commit
3d43fb4
·
verified ·
1 Parent(s): 72e1a17

Upload AceCoderLlamaForCausalRM

Browse files
config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "architectures": [
3
- "LlamaForCausalLM"
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
 
1
  {
2
  "architectures": [
3
+ "AceCoderLlamaForCausalRM"
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d38e578156d959ca5a0c36006a01aeb5bf09c610ec2af005f60cf297afab9a06
3
- size 2336277048
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a02a679aeb42eec2fe88a58470bb820b197e11bef76f35fe12d07c764651a49f
3
+ size 2336293620
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "metadata": {
3
- "total_size": 32121044992
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "model-00004-of-00004.safetensors",
@@ -293,6 +293,8 @@
293
  "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
294
  "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
295
  "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
296
- "model.norm.weight": "model-00004-of-00004.safetensors"
 
 
297
  }
298
  }
 
1
  {
2
  "metadata": {
3
+ "total_size": 32121061380
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "model-00004-of-00004.safetensors",
 
293
  "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
294
  "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
295
  "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
296
+ "model.norm.weight": "model-00004-of-00004.safetensors",
297
+ "v_head.summary.bias": "model-00004-of-00004.safetensors",
298
+ "v_head.summary.weight": "model-00004-of-00004.safetensors"
299
  }
300
  }