farzadab committed
Commit 0ace0e2 · verified · 1 Parent(s): f27f705

Update config.json

Files changed (1):
config.json +1 -0
config.json CHANGED
@@ -72,5 +72,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.48.1",
   "num_attention_heads": 32,
+  "num_hidden_layers": 16,
   "vocab_size": 128256
 }
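A quick way to sanity-check the change is to load the config with transformers and read the key back. This is a minimal sketch: the checkpoint path is a placeholder (the commit does not name the repository), and it assumes a transformers install compatible with the pinned 4.48.1.

    from transformers import AutoConfig

    # Placeholder path: substitute the actual model repo id or a local
    # directory containing this config.json.
    config = AutoConfig.from_pretrained("path/to/checkpoint")

    # Values from the diff above.
    print(config.num_hidden_layers)    # 16 (the key added in this commit)
    print(config.num_attention_heads)  # 32
    print(config.vocab_size)           # 128256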