MohamedAhmedAE committed · verified
Commit eb43460 · 1 Parent(s): ebab132

Update config.json

Files changed (1): config.json (+4 -4)
config.json CHANGED
@@ -10,16 +10,16 @@
     128008,
     128009
   ],
-  "head_dim": 64,
+  "head_dim": 128,
   "hidden_act": "silu",
-  "hidden_size": 2048,
+  "hidden_size": 3072,
   "initializer_range": 0.02,
   "intermediate_size": 8192,
   "max_position_embeddings": 131072,
   "mlp_bias": false,
   "model_type": "llama",
-  "num_attention_heads": 32,
-  "num_hidden_layers": 16,
+  "num_attention_heads": 24,
+  "num_hidden_layers": 28,
   "num_key_value_heads": 8,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
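
The four changed fields are mutually consistent: hidden_size equals num_attention_heads × head_dim (24 × 128 = 3072), and the 24 query heads divide evenly over the 8 grouped key/value heads. (The old values match the published Llama 3.2 1B geometry and the new ones the 3B geometry, though the commit itself does not say so.) A minimal sanity-check sketch using `transformers`, assuming the updated file has been downloaded locally as `config.json`:

```python
from transformers import LlamaConfig

# Load the committed config from a local path (assumed download location).
config = LlamaConfig.from_json_file("config.json")

# The four fields changed in this commit.
assert config.hidden_size == 3072
assert config.num_attention_heads == 24
assert config.num_hidden_layers == 28

# Geometry checks: per-head width times head count must equal the hidden size,
# and query heads must divide evenly across the 8 grouped key/value heads.
assert config.head_dim * config.num_attention_heads == config.hidden_size  # 128 * 24 == 3072
assert config.num_attention_heads % config.num_key_value_heads == 0        # 24 % 8 == 0
```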