{
    "d_model": 3072,
    "ssm_cfg": {
        "expand": 1
    },
    "rms_norm_eps": 1e-05,
    "vocab_size": null,
    "d_inner": null,
    "d_xb": 1024,
    "intermediate_size": 8192,
    "hidden_act": "silu",
    "n_layer": 28,
    "attn_layers": [
        6,
        13,
        20,
        27
    ]
}