codys12 committed
Commit fd206d5 · verified · 1 Parent(s): cd8de59

Update config.json

Files changed (1):
  1. config.json +8 -0
config.json CHANGED
@@ -17,6 +17,14 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 36,
   "num_key_value_heads": 8,
+  "quantization_config": {
+    "linear_class": "bitlinear",
+    "quant_method": "bitnet",
+    "quantization_mode": "online",
+    "linear_class": "autobitlinear",
+    "use_rms_norm": true,
+    "modules_to_not_convert": []
+  },
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 1000000,