Luo-Yihong committed on
Commit
d5f0237
·
verified ·
1 Parent(s): 7fbe94c

Upload config.json

Browse files
Files changed (1) hide show
  1. new/config.json +5 -4
new/config.json CHANGED
@@ -1,17 +1,18 @@
1
  {
2
  "_class_name": "Transformer2DModel",
3
- "_diffusers_version": "0.26.0",
4
- "_name_or_path": "PixArt-alpha/PixArt-XL-2-512x512",
5
  "activation_fn": "gelu-approximate",
6
  "attention_bias": true,
7
  "attention_head_dim": 72,
8
  "attention_type": "default",
9
  "caption_channels": 4096,
10
  "cross_attention_dim": 1152,
11
- "decay": 0.9999,
12
  "double_self_attention": false,
13
  "dropout": 0.0,
14
  "in_channels": 4,
 
15
  "inv_gamma": 1.0,
16
  "min_decay": 0.0,
17
  "norm_elementwise_affine": false,
@@ -23,7 +24,7 @@
23
  "num_layers": 28,
24
  "num_vector_embeds": null,
25
  "only_cross_attention": false,
26
- "optimization_step": 30000,
27
  "out_channels": 8,
28
  "patch_size": 2,
29
  "power": 0.6666666666666666,
 
1
  {
2
  "_class_name": "Transformer2DModel",
3
+ "_diffusers_version": "0.27.2",
4
+ "_name_or_path": "Luo-Yihong/yoso_pixart512",
5
  "activation_fn": "gelu-approximate",
6
  "attention_bias": true,
7
  "attention_head_dim": 72,
8
  "attention_type": "default",
9
  "caption_channels": 4096,
10
  "cross_attention_dim": 1152,
11
+ "decay": 0.9995,
12
  "double_self_attention": false,
13
  "dropout": 0.0,
14
  "in_channels": 4,
15
+ "interpolation_scale": null,
16
  "inv_gamma": 1.0,
17
  "min_decay": 0.0,
18
  "norm_elementwise_affine": false,
 
24
  "num_layers": 28,
25
  "num_vector_embeds": null,
26
  "only_cross_attention": false,
27
+ "optimization_step": 100,
28
  "out_channels": 8,
29
  "patch_size": 2,
30
  "power": 0.6666666666666666,