{
"adapter_layers": 32,
"adapter_len": 128,
"auto_mapping": null,
"base_model_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
"inference_mode": true,
"peft_type": "ADAPTION_PROMPT",
"revision": null,
"target_modules": "self_attn",
"task_type": "CAUSAL_LM"
}