{
  "_class_name": "AllegroTransformer3DModel",
  "_diffusers_version": "0.30.3",
  "_name_or_path": "/cpfs/data/user/larrytsai/Projects/Yi-VG/allegro/transformer",
  "activation_fn": "gelu-approximate",
  "attention_bias": true,
  "attention_head_dim": 96,
  "ca_attention_mode": "xformers",
  "caption_channels": 4096,
  "cross_attention_dim": 2304,
  "double_self_attention": false,
  "downsampler": null,
  "dropout": 0.0,
  "in_channels": 4,
  "interpolation_scale_h": 2.0,
  "interpolation_scale_t": 2.2,
  "interpolation_scale_w": 2.0,
  "model_max_length": 300,
  "norm_elementwise_affine": false,
  "norm_eps": 1e-06,
  "norm_type": "ada_norm_single",
  "num_attention_heads": 24,
  "num_embeds_ada_norm": 1000,
  "num_layers": 32,
  "only_cross_attention": false,
  "out_channels": 4,
  "patch_size": 2,
  "patch_size_t": 1,
  "sa_attention_mode": "flash",
  "sample_size": [
    90,
    160
  ],
  "sample_size_t": 22,
  "upcast_attention": false,
  "use_additional_conditions": null,
  "use_linear_projection": false,
  "use_rope": true
}