dtype: "bfloat16"

model:
  text_cond_attn: false
  add_cond_attn: false
  union_cond_attn: true
  double_use_condition: false
  single_use_condition: true
  cond_cond_cross_attn: true
  train_partial_text_lora: false # dblock txt lora
  train_partial_text_lora_layers: "qkv_toout"
  train_partial_latent_lora: false # dblock latent lora
  train_partial_latent_lora_layers: "qkv_toout"
  train_partial_lora: true
  train_partial_lora_layers: "qkv_projout"
  use_dit_lora: true
  latent_lora: false
  text_lora: true
  sblock_lora: true
  use_pretrained_dit_lora: true
  use_condition_dblock_lora: false
  use_condition_sblock_lora: false
  use_pretrained_condition_lora: false
  freeze_mod_adapter: true
  freeze_txt_lora: true
  freeze_single_lora: false
  use_pretrained_dit_single_lora: true
  pos_offset_type: "diagonal"

  modulation:
    use_clip: true
    use_vae: false
    use_dit: false
    use_text_mod: true
    use_img_mod: false
    pass_vae: true
    max_text_len: 128
    adapter_type: "clip_adapter"
    adapter_layers: 3
    adapter_width: 3072
    out_dim: 3072
    use_perblock_adapter: true
    per_block_adapter_layers: 3
    per_block_adapter_width: 3072
    per_block_adapter_single_blocks: 0
    eos_exclude: false

  dit_lora_config:
    r: 128
    lora_alpha: 128
    init_lora_weights: "gaussian"
    target_modules: "(.*x_embedder|.*(?
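
The `dit_lora_config` block follows the usual PEFT-style LoRA fields (rank, scaling alpha, weight init, and a module-matching pattern). Below is a minimal sketch of how such a block could be loaded and turned into a `peft.LoraConfig`; the file name `train_config.yaml` is an assumption, and note that the `target_modules` regex is truncated in this excerpt, so the value must be completed before the config parses or runs.

```python
# Minimal sketch, assuming the YAML above is saved as train_config.yaml
# with a complete (non-truncated) target_modules regex.
import yaml                  # pip install pyyaml
from peft import LoraConfig  # pip install peft

with open("train_config.yaml") as f:
    cfg = yaml.safe_load(f)

lora_cfg = cfg["model"]["dit_lora_config"]

# peft treats a string target_modules as a regex pattern, which is what the
# "(.*x_embedder|...)" style here relies on.
dit_lora = LoraConfig(
    r=lora_cfg["r"],                                  # LoRA rank
    lora_alpha=lora_cfg["lora_alpha"],                # scaling factor
    init_lora_weights=lora_cfg["init_lora_weights"],  # "gaussian" init
    target_modules=lora_cfg["target_modules"],        # regex over module names
)
```

With `r == lora_alpha` (128 here), the effective LoRA scale `lora_alpha / r` is 1.0, a common choice that keeps the adapter's contribution unscaled.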