custom-llm / config.json
{
  "vocab_size": 49152,
  "hidden_size": 576,
  "intermediate_size": 1536,
  "num_hidden_layers": 30,
  "num_attention_heads": 9,
  "num_key_value_heads": 3,
  "max_position_embeddings": 2048,
  "rms_norm_eps": 1e-5,
  "rope_theta": 10000.0,
  "pad_token_id": 0,
  "bos_token_id": 0,
  "eos_token_id": 0,
  "model_type": "causal_lm"
}
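
The field names (rms_norm_eps, rope_theta, num_key_value_heads) follow the Llama-style decoder configs used in the transformers library, although "causal_lm" is not a registered model_type there, so AutoConfig would not resolve this file on its own. Below is a minimal sketch, assuming the intent is a Llama-style architecture, of mapping these hyperparameters onto a LlamaConfig by hand and instantiating an untrained model; the choice of LlamaConfig is an assumption, not something stated in the file.

# Sketch: build a Llama-style config from the fields above.
# Assumption: the target is a Llama-style decoder; "causal_lm" is not a
# registered model_type, so the fields are passed to LlamaConfig explicitly.
from transformers import LlamaConfig, LlamaForCausalLM

config = LlamaConfig(
    vocab_size=49152,
    hidden_size=576,
    intermediate_size=1536,
    num_hidden_layers=30,
    num_attention_heads=9,
    num_key_value_heads=3,   # grouped-query attention: 3 KV heads shared by 9 query heads
    max_position_embeddings=2048,
    rms_norm_eps=1e-5,
    rope_theta=10000.0,
    pad_token_id=0,
    bos_token_id=0,
    eos_token_id=0,
)

# Instantiate a randomly initialized model with this shape and report its size.
model = LlamaForCausalLM(config)
print(f"parameters: {sum(p.numel() for p in model.parameters()):,}")

With 9 query heads sharing 3 key/value heads, the attention layers use grouped-query attention, which keeps the KV cache small for a model of this scale.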