{
  "peft_type": "lora",
  "adapter_type": "lora",
  "target_modules": ["attn", "resblocks"],
  "lora_r": 8,
  "lora_alpha": 16,
  "lora_dropout": 0.1,
  "trainable": true,
  "bias": "none",
  "layer_norm": true
}