Commit 2fcda29 (verified) by dvruette
Parent: 227c382

Upload folder using huggingface_hub

Files changed (2)
  1. config.json +2 -0
  2. configuration_dit.py +4 -0
config.json CHANGED

```diff
@@ -12,6 +12,8 @@
   "model_type": "dit",
   "num_attention_heads": 16,
   "num_hidden_layers": 24,
+  "p_uniform": 0.1,
+  "t_eps": 0.0001,
   "timestep_cond_dim": 128,
   "torch_dtype": "float32",
   "transformers_version": "4.49.0",
```
configuration_dit.py CHANGED

```diff
@@ -13,6 +13,8 @@ class DITConfig(PretrainedConfig):
         num_hidden_layers: int = 12,
         num_attention_heads: int = 12,
         attention_dropout: float = 0.0,
+        p_uniform: float = 0.0,
+        t_eps: float = 1e-4,
         **kwargs
     ):
         super().__init__(**kwargs)
@@ -23,3 +25,5 @@ class DITConfig(PretrainedConfig):
         self.num_hidden_layers = num_hidden_layers
         self.num_attention_heads = num_attention_heads
         self.attention_dropout = attention_dropout
+        self.p_uniform = p_uniform
+        self.t_eps = t_eps
```
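
The new attributes behave like any other PretrainedConfig field: they fall back to the Python defaults when the class is constructed directly and are serialized back out on save. A minimal sketch, assuming configuration_dit.py is importable from the working directory; the names suggest p_uniform is a probability of uniform-noise corruption and t_eps a lower clamp on the diffusion timestep, but that reading is an assumption, as the commit itself only adds the fields.

```python
from configuration_dit import DITConfig

# Defaults from the updated __init__ signature.
config = DITConfig()
assert config.p_uniform == 0.0
assert config.t_eps == 1e-4

# Overrides round-trip through serialization, matching config.json above.
config = DITConfig(p_uniform=0.1, t_eps=1e-4)
d = config.to_dict()
print(d["p_uniform"], d["t_eps"])  # 0.1 0.0001
```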