{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_darwinlm.DarwinLMConfig",
    "AutoModelForCausalLM": "modeling_darwinlm.LlamaForCausalLM"
  },
  "bos_token_id": 1,
  "dim_each_mlp": {
    "0.mlp.down_proj": 3104,
    "1.mlp.down_proj": 8032,
    "10.mlp.down_proj": 1824,
    "11.mlp.down_proj": 0,
    "12.mlp.down_proj": 3104,
    "13.mlp.down_proj": 5280,
    "14.mlp.down_proj": 5280,
    "15.mlp.down_proj": 4256,
    "16.mlp.down_proj": 4256,
    "17.mlp.down_proj": 6496,
    "18.mlp.down_proj": 6496,
    "19.mlp.down_proj": 5280,
    "2.mlp.down_proj": 6496,
    "20.mlp.down_proj": 3104,
    "21.mlp.down_proj": 4256,
    "22.mlp.down_proj": 4256,
    "23.mlp.down_proj": 3104,
    "24.mlp.down_proj": 3104,
    "25.mlp.down_proj": 3104,
    "26.mlp.down_proj": 4256,
    "27.mlp.down_proj": 3104,
    "28.mlp.down_proj": 3104,
    "29.mlp.down_proj": 3104,
    "3.mlp.down_proj": 4256,
    "30.mlp.down_proj": 6496,
    "31.mlp.down_proj": 6496,
    "4.mlp.down_proj": 5280,
    "5.mlp.down_proj": 5280,
    "6.mlp.down_proj": 4256,
    "7.mlp.down_proj": 3104,
    "8.mlp.down_proj": 5280,
    "9.mlp.down_proj": 4256
  },
  "eos_token_id": 2,
  "head_dim": 128,
  "heads_each_attn": {
    "0.self_attn.o_proj": 7,
    "1.self_attn.o_proj": 11,
    "10.self_attn.o_proj": 14,
    "11.self_attn.o_proj": 32,
    "12.self_attn.o_proj": 32,
    "13.self_attn.o_proj": 4,
    "14.self_attn.o_proj": 11,
    "15.self_attn.o_proj": 22,
    "16.self_attn.o_proj": 14,
    "17.self_attn.o_proj": 18,
    "18.self_attn.o_proj": 22,
    "19.self_attn.o_proj": 14,
    "2.self_attn.o_proj": 14,
    "20.self_attn.o_proj": 22,
    "21.self_attn.o_proj": 22,
    "22.self_attn.o_proj": 22,
    "23.self_attn.o_proj": 18,
    "24.self_attn.o_proj": 32,
    "25.self_attn.o_proj": 32,
    "26.self_attn.o_proj": 18,
    "27.self_attn.o_proj": 31,
    "28.self_attn.o_proj": 28,
    "29.self_attn.o_proj": 32,
    "3.self_attn.o_proj": 14,
    "30.self_attn.o_proj": 26,
    "31.self_attn.o_proj": 32,
    "4.self_attn.o_proj": 18,
    "5.self_attn.o_proj": 22,
    "6.self_attn.o_proj": 18,
    "7.self_attn.o_proj": 22,
    "8.self_attn.o_proj": 14,
    "9.self_attn.o_proj": 18
  },
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "kv_ignore": false,
  "max_position_embeddings": 2048,
  "mlp_bias": false,
  "model_type": "darwinlm",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.45.0.dev0",
  "use_cache": true,
  "vocab_size": 32000
}
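
A minimal sketch of how a config like this might be loaded, assuming the file sits in a Hugging Face model repository that also contains the custom modeling files named in "auto_map" above; the repo path below is a placeholder, not the actual checkpoint name, and the attribute access assumes the extra keys are exposed on the config object as transformers normally does for custom fields.

from transformers import AutoConfig, AutoModelForCausalLM

# Placeholder repo/path for illustration; substitute the real DarwinLM checkpoint.
repo_id = "path/to/darwinlm-checkpoint"

# trust_remote_code=True lets transformers resolve the "auto_map" entries
# (configuration_darwinlm.DarwinLMConfig, modeling_darwinlm.LlamaForCausalLM)
# from the repository's own Python files, since model_type "darwinlm" is not built in.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# The per-layer pruning decisions are carried directly in the config:
# the remaining intermediate width of each MLP and the remaining heads of each attention block.
print(config.dim_each_mlp["0.mlp.down_proj"])         # 3104 of 11008 intermediate units kept
print(config.heads_each_attn["0.self_attn.o_proj"])   # 7 of 32 attention heads kept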