{
  "normalize_embed": true,
  "nth_layer": 13,
  "output_dim": 1,
  "precision": "32",
  "proj_act_fn": "gelu",
  "proj_dropout": 0.0,
  "proj_ln": true,
  "proj_num_layer": 5,
  "target_transform": {
    "CE": {
      "mean": 5.06865,
      "std": 1.93029
    },
    "CU": {
      "mean": 5.73633,
      "std": 1.75669
    },
    "PC": {
      "mean": 3.18591,
      "std": 1.86637
    },
    "PQ": {
      "mean": 6.57505,
      "std": 1.51466
    }
  },
  "use_weighted_layer_sum": true
}
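
The target_transform block stores per-axis mean/std statistics, which suggests the regression targets were standardized (z-scored) during training, so raw scores would be recovered at inference by inverting that transform. Below is a minimal sketch of that inverse step; the filename config.json and the denormalize helper are assumptions for illustration, while the axis keys (CE, CU, PC, PQ) come from the config above.

import json

# Assumed filename; adjust to wherever this config is stored.
with open("config.json") as f:
    cfg = json.load(f)

def denormalize(axis: str, value: float) -> float:
    """Map a standardized prediction back to the raw score scale
    using the per-axis statistics from target_transform.
    (Hypothetical helper, not part of the original config/codebase.)"""
    stats = cfg["target_transform"][axis]
    return value * stats["std"] + stats["mean"]

# Sanity check: a standardized prediction of 0.0 maps back to each axis mean.
for axis in cfg["target_transform"]:
    print(axis, denormalize(axis, 0.0))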