jaszczur committed
Commit 6b812eb · verified · 1 parent: 84660d3

Add model checkpoint

Files changed (2):
  1. config.json (+3 -8)
  2. model.safetensors (+1 -1)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/home/rllaskowski_a100/mot-training/experiments/mot_medium_32_8/checkpoint-125000",
+  "_name_or_path": "/home/rllaskowski_a100/mot-training/experiments/mot_medium_32_8/checkpoint-80000",
   "activation_function": "gelu_new",
   "architectures": [
     "MoTModel"
@@ -18,20 +18,15 @@
   "n_embd": 512,
   "n_expert": 256,
   "n_head": 8,
-  "n_inner": 32,
+  "n_inner": 65536,
   "n_layer": 8,
   "n_positions": 1024,
   "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
   "scale_attn_by_inverse_layer_idx": false,
   "scale_attn_weights": true,
-  "summary_activation": null,
-  "summary_first_dropout": 0.1,
-  "summary_proj_to_labels": true,
-  "summary_type": "cls_index",
-  "summary_use_proj": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.39.0.dev0",
+  "transformers_version": "4.42.0.dev0",
   "use_cache": true,
   "use_discrete_routing": false,
   "vocab_size": 50257
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:15c1e153bdc798e5d76c9f2c00aa0ca34332082fbc889825811e87290041e302
+oid sha256:16f5f35057f07a1f48e60e72ac84ffa132022ca1d9f53934d817a821682e2f93
 size 2290399320
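This file is a git-lfs pointer: only the sha256 oid changed, while the size stayed at 2,290,399,320 bytes (consistent with float32 weights for the same architecture). A minimal sketch for verifying a downloaded copy against the pointer after this commit; the local file path is an assumption:

```python
import hashlib
from pathlib import Path

# Path to the downloaded weights -- adjust to your local checkout.
path = Path("model.safetensors")

# Values from the git-lfs pointer after this commit.
expected_oid = "16f5f35057f07a1f48e60e72ac84ffa132022ca1d9f53934d817a821682e2f93"
expected_size = 2290399320

# Hash in 1 MiB chunks so the ~2.3 GB file is never read into memory at once.
h = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert path.stat().st_size == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```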