MambaInLlama3B / mamba_config.json
{
  "d_model": 3072,
  "ssm_cfg": {
    "expand": 1
  },
  "rms_norm_eps": 1e-05,
  "vocab_size": null,
  "d_inner": null,
  "d_xb": 1024,
  "intermediate_size": 8192,
  "hidden_act": "silu",
  "n_layer": 28,
  "attn_layers": [
    6,
    13,
    20,
    27
  ]
}
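
A minimal sketch of consuming this config, assuming it is saved as mamba_config.json and that attn_layers lists the layer indices that retain attention in the hybrid model, with every other layer being a Mamba (SSM) block; the file path and the "attention"/"mamba" labels below are illustrative, not part of any library API.

import json

# Load the hybrid-model config shown above (path is an assumption).
with open("mamba_config.json") as f:
    cfg = json.load(f)

n_layer = cfg["n_layer"]
attn_layers = set(cfg["attn_layers"])

# Derive a per-layer block type: indices in attn_layers keep attention;
# the remaining layers are assumed to be Mamba blocks.
layer_types = [
    "attention" if i in attn_layers else "mamba" for i in range(n_layer)
]

print(f"d_model={cfg['d_model']}, layers={n_layer}, "
      f"attention at={sorted(attn_layers)}")
for i, kind in enumerate(layer_types):
    print(f"layer {i:2d}: {kind}")

With this config, 4 of the 28 layers (indices 6, 13, 20, 27, one per quarter of the stack) would be attention layers and the other 24 Mamba blocks.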