pisco-mistral / config.json
Uploaded by maxoul — commit 6f8f4b9 (verified): "Upload COCOM"
{
"_attn_implementation_autoset": true,
"ae_mode": "token",
"attn_implementation": null,
"auto_map": {
"AutoConfig": "modelling_pisco.COCOMConfig",
"AutoModel": "modelling_pisco.COCOM"
},
"compr_base_model_name": "mistralai/Mistral-7B-Instruct-v0.2",
"compr_every_n_layer": null,
"compr_linear_type": "concat",
"compr_mlp_hidden_dim": 8096,
"compr_model_name": null,
"compr_n_layers": null,
"compr_rate": 16,
"compr_rms_norm": false,
"compr_use_mlp": true,
"decoder_model_name": "mistralai/Mistral-7B-Instruct-v0.2",
"device_map": null,
"different_mem_tokens": true,
"doc_max_length": 128,
"generation_top_k": 1,
"kbtc_training": false,
"load_adapters": true,
"lora": true,
"lora_compressor": false,
"lora_r": 16,
"lora_r_compressor": 16,
"max_new_tokens": 128,
"model_type": "COCOM",
"optimize_mem_tokens": true,
"quantization": "no",
"sep": true,
"training_form": "both_separately",
"transformers_version": "4.48.0"
}