{
  "adapter_layers": 30,
  "adapter_len": 32,
  "auto_mapping": null,
  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
  "inference_mode": true,
  "peft_type": "ADAPTION_PROMPT",
  "revision": null,
  "target_modules": "self_attn",
  "task_type": "CAUSAL_LM"
}