{
  "adapter_layers": 32,
  "adapter_len": 128,
  "auto_mapping": null,
  "base_model_name_or_path": "meta-llama/Llama-2-7b-chat-hf",
  "inference_mode": true,
  "peft_type": "ADAPTION_PROMPT",
  "revision": null,
  "target_modules": "self_attn",
  "task_type": "CAUSAL_LM"
}
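
Below is a minimal loading sketch using the Hugging Face peft library. It assumes the adapter weights (adapter_model.safetensors or adapter_model.bin) sit next to this adapter_config.json; "path/to/adapter" is a placeholder, not a real repository path.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

# Load the base model named in base_model_name_or_path.
base = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-chat-hf",
    torch_dtype=torch.float16,
)
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf")

# PeftModel reads peft_type ("ADAPTION_PROMPT") from this config and
# attaches the learned adaption-prompt parameters to the base model's
# self_attn modules: 128 soft-prompt tokens (adapter_len) in each of
# the 32 decoder layers (adapter_layers).
model = PeftModel.from_pretrained(base, "path/to/adapter")
model.eval()  # inference_mode is already true in this config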