{
    "model_type": "gemma2",
    "vocab_size": 256000,
    "n_positions": 512,
    "n_embd": 768,
    "n_layer": 12,
    "n_head": 12,
    "activation_function": "gelu",
    "resid_pdrop": 0.1,
    "attn_pdrop": 0.1,
    "layer_norm_epsilon": 1e-05,
    "initializer_range": 0.02
}
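
For reference, the snippet below shows one way such a configuration might be read and sanity-checked from Python. It is a minimal sketch under stated assumptions: the filename "config.json", the derived per-head dimension, and the use of the standard-library json module are illustrative and not part of the file itself.

import json

# Minimal sketch: load the configuration above (assumed to be saved locally as
# "config.json") and derive the per-head dimension from the embedding width
# and the number of attention heads.
with open("config.json") as f:
    cfg = json.load(f)

head_dim = cfg["n_embd"] // cfg["n_head"]   # 768 // 12 = 64
print(cfg["model_type"], cfg["n_layer"], "layers,", head_dim, "dims per head")

If the file sits inside a Hugging Face transformers checkpoint directory, transformers.AutoConfig.from_pretrained on that directory would be the more typical entry point. Note, however, that the GPT-2-style keys used here (n_embd, n_layer, n_head, n_positions) differ from the names Gemma 2 configurations normally use (hidden_size, num_hidden_layers, num_attention_heads, max_position_embeddings), so it is worth verifying that these fields are actually picked up by the architecture declared in model_type.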