{
  "base_model_name_or_path": "tiiuae/falcon-7b-instruct",
  "model_type": "falcon",
  "architectures": ["FalconForCausalLM"],
  "adapter_config": {
    "r": 4,
    "lora_alpha": 16,
    "lora_dropout": 0.09,
    "bias": "none",
    "task_type": "CAUSAL_LM"
  }
}
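
A minimal sketch, assuming the adapter hyperparameters above map directly onto Hugging Face `peft`'s `LoraConfig`. The config file does not list target modules, so the `target_modules` value below (Falcon's fused `query_key_value` projection) is an assumption, not something stated in the file.

```python
# Sketch: build a LoRA adapter for falcon-7b-instruct with the hyperparameters above.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

# Base model named in "base_model_name_or_path".
base = AutoModelForCausalLM.from_pretrained("tiiuae/falcon-7b-instruct")

lora_config = LoraConfig(
    r=4,                    # LoRA rank, from "r"
    lora_alpha=16,          # scaling factor, from "lora_alpha"
    lora_dropout=0.09,      # dropout on LoRA layers, from "lora_dropout"
    bias="none",            # no bias parameters are trained
    task_type="CAUSAL_LM",  # causal language modeling, from "task_type"
    target_modules=["query_key_value"],  # assumption: not specified in this config
)

model = get_peft_model(base, lora_config)
model.print_trainable_parameters()  # show how few parameters the rank-4 adapter adds
```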