mixtral_8_7b_en / config.json
{
    "module": "keras_hub.src.models.mixtral.mixtral_backbone",
    "class_name": "MixtralBackbone",
    "config": {
        "name": "mixtral_backbone",
        "trainable": true,
        "vocabulary_size": 32000,
        "num_layers": 32,
        "num_query_heads": 32,
        "hidden_dim": 4096,
        "intermediate_dim": 14336,
        "num_experts": 8,
        "top_k": 2,
        "router_jitter_noise": 0.0,
        "rope_max_wavelength": 1000000.0,
        "rope_scaling_factor": 1.0,
        "num_key_value_heads": 8,
        "router_aux_loss_coef": 0.02,
        "sliding_window": null,
        "layer_norm_epsilon": 1e-05,
        "dropout": 0
    },
    "registered_name": "keras_hub>MixtralBackbone"
}
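
This is the standard Mixtral 8x7B backbone configuration: a sparse mixture-of-experts stack with 8 experts per layer and top-2 routing, grouped-query attention (32 query heads sharing 8 key/value heads), and RoPE with a 1,000,000 max wavelength. A minimal sketch of rebuilding the backbone from this file follows, assuming keras_hub is installed, the JSON is saved locally as config.json, and the class is exported as keras_hub.models.MixtralBackbone (consistent with the registered_name above):

import json

import keras_hub

# Load the serialized Keras config shown above (the local path is an
# assumption for this sketch).
with open("config.json") as f:
    spec = json.load(f)

# Rebuild the architecture from its config. from_config() creates a
# randomly initialized Mixtral 8x7B backbone, so it allocates the full
# (roughly 47B) parameter set; from_preset("mixtral_8_7b_en") would
# instead download the pretrained weights.
backbone = keras_hub.models.MixtralBackbone.from_config(spec["config"])
print(f"{backbone.count_params():,} parameters")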