{
  "_class_name": "Flux2Transformer2DModel",
  "_diffusers_version": "0.36.0.dev0",
  "_name_or_path": "black-forest-labs/FLUX.2-dev",
  "attention_head_dim": 128,
  "axes_dims_rope": [
    32,
    32,
    32,
    32
  ],
  "eps": 1e-06,
  "in_channels": 128,
  "joint_attention_dim": 15360,
  "mlp_ratio": 3.0,
  "num_attention_heads": 48,
  "num_layers": 8,
  "num_single_layers": 48,
  "out_channels": null,
  "patch_size": 1,
  "rope_theta": 2000,
  "timestep_guidance_channels": 256,
  "quantization_config": {
    "quant_method": "hqq",
    "nbits": 2,
    "group_size": 64,
    "axis": 1,
    "format": "hqq_2bit_packed"
  }
}