Upload folder using huggingface_hub

Files changed:
- config.json +62 -0
- model.safetensors +3 -0
- quant_log.csv +113 -0
- quantize_config.json +21 -0
config.json
ADDED
@@ -0,0 +1,62 @@
{
  "_attn_implementation_autoset": true,
  "_name_or_path": "/home/azureuser/.cache/huggingface/hub/models--meta-llama--Llama-3.2-1B-Instruct/snapshots/9213176726f574b556790deb65791e0c5aa438b6",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 128000,
  "eos_token_id": [
    128001,
    128008,
    128009
  ],
  "head_dim": 64,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 16,
  "num_key_value_heads": 8,
  "pretraining_tp": 1,
  "quantization_config": {
    "bits": 4,
    "checkpoint_format": "gptq",
    "desc_act": false,
    "dynamic": null,
    "group_size": 32,
    "lm_head": false,
    "meta": {
      "damp_auto_increment": 0.0025,
      "damp_percent": 0.01,
      "mse": 0.0,
      "quantizer": [
        "gptqmodel:1.5.1-dev"
      ],
      "static_groups": false,
      "true_sequential": true,
      "uri": "https://github.com/modelcloud/gptqmodel"
    },
    "quant_method": "gptq",
    "sym": true
  },
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 32.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.47.1",
  "use_cache": true,
  "vocab_size": 128256
}
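Because the quantization_config block is embedded in config.json, transformers can dispatch loading straight to a GPTQ backend. A minimal loading sketch, assuming a GPTQ-capable backend (gptqmodel or auto-gptq, via optimum) is installed; the repo id below is hypothetical:

    from transformers import AutoModelForCausalLM, AutoTokenizer

    repo_id = "your-namespace/Llama-3.2-1B-Instruct-gptq-4bit"  # hypothetical repo id

    tokenizer = AutoTokenizer.from_pretrained(repo_id)
    # transformers reads the embedded quantization_config and routes the
    # 4-bit weights through the installed GPTQ kernels.
    model = AutoModelForCausalLM.from_pretrained(repo_id, device_map="auto")

    inputs = tokenizer("Hello", return_tensors="pt").to(model.device)
    out = model.generate(**inputs, max_new_tokens=32)
    print(tokenizer.decode(out[0], skip_special_tokens=True))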
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5435ed8cef43f02129c506f8b2e1dd08538eca737ad419c3ddfba23b81724499
size 1614733472
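This is a Git LFS pointer: the ~1.6 GB safetensors payload is fetched separately, and the oid is the SHA-256 of that payload. A small sketch to verify a downloaded copy against the pointer (the local path is assumed):

    import hashlib

    # Expected digest is the oid from the LFS pointer above.
    expected = "5435ed8cef43f02129c506f8b2e1dd08538eca737ad419c3ddfba23b81724499"

    h = hashlib.sha256()
    with open("model.safetensors", "rb") as f:  # assumed local path
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)

    assert h.hexdigest() == expected, "checksum mismatch"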
quant_log.csv
ADDED
@@ -0,0 +1,113 @@
layer,module,loss,damp,time
0,self_attn.k_proj,0.09601,0.01000,1.079
0,self_attn.v_proj,0.00299,0.01000,0.841
0,self_attn.q_proj,0.21113,0.01000,0.727
0,self_attn.o_proj,0.00017,0.01000,0.727
0,mlp.up_proj,0.14873,0.01000,0.763
0,mlp.gate_proj,0.18908,0.01000,0.904
0,mlp.down_proj,0.00190,0.01000,3.295
1,self_attn.k_proj,0.16585,0.01000,0.711
1,self_attn.v_proj,0.00992,0.01000,0.749
1,self_attn.q_proj,0.32035,0.01000,0.690
1,self_attn.o_proj,0.00039,0.01000,0.753
1,mlp.up_proj,0.22062,0.01000,0.756
1,mlp.gate_proj,0.30241,0.01000,0.732
1,mlp.down_proj,0.16209,0.01000,3.030
2,self_attn.k_proj,0.31228,0.01000,0.713
2,self_attn.v_proj,0.02535,0.01000,0.677
2,self_attn.q_proj,0.63971,0.01000,0.711
2,self_attn.o_proj,0.00066,0.01000,0.730
2,mlp.up_proj,0.28284,0.01000,0.746
2,mlp.gate_proj,0.44825,0.01000,0.692
2,mlp.down_proj,0.00335,0.01000,3.236
3,self_attn.k_proj,0.19264,0.01000,0.758
3,self_attn.v_proj,0.02762,0.01000,0.695
3,self_attn.q_proj,0.42406,0.01000,0.738
3,self_attn.o_proj,0.00111,0.01000,0.765
3,mlp.up_proj,0.33513,0.01000,0.785
3,mlp.gate_proj,0.66167,0.01000,0.750
3,mlp.down_proj,0.00468,0.01000,3.254
4,self_attn.k_proj,0.21151,0.01000,0.774
4,self_attn.v_proj,0.02663,0.01000,0.746
4,self_attn.q_proj,0.43425,0.01000,0.743
4,self_attn.o_proj,0.00185,0.01000,0.801
4,mlp.up_proj,0.33790,0.01000,0.835
4,mlp.gate_proj,0.72013,0.01000,0.745
4,mlp.down_proj,0.00553,0.01000,3.029
5,self_attn.k_proj,0.30856,0.01000,0.716
5,self_attn.v_proj,0.02561,0.01000,0.681
5,self_attn.q_proj,0.57913,0.01000,0.702
5,self_attn.o_proj,0.00256,0.01000,0.754
5,mlp.up_proj,0.36168,0.01000,0.806
5,mlp.gate_proj,0.66184,0.01000,0.734
5,mlp.down_proj,0.00675,0.01000,3.046
6,self_attn.k_proj,0.25328,0.01000,0.736
6,self_attn.v_proj,0.02967,0.01000,0.681
6,self_attn.q_proj,0.41055,0.01000,0.721
6,self_attn.o_proj,0.00379,0.01000,0.724
6,mlp.up_proj,0.37050,0.01000,0.751
6,mlp.gate_proj,0.66534,0.01000,0.718
6,mlp.down_proj,0.00709,0.01000,3.041
7,self_attn.k_proj,0.25026,0.01000,0.748
7,self_attn.v_proj,0.03261,0.01000,0.678
7,self_attn.q_proj,0.47341,0.01000,0.708
7,self_attn.o_proj,0.00349,0.01000,0.727
7,mlp.up_proj,0.40085,0.01000,0.776
7,mlp.gate_proj,0.65237,0.01000,0.749
7,mlp.down_proj,0.00813,0.01000,3.014
8,self_attn.k_proj,0.32256,0.01000,0.725
8,self_attn.v_proj,0.03293,0.01000,0.690
8,self_attn.q_proj,0.53772,0.01000,0.689
8,self_attn.o_proj,0.00522,0.01000,0.755
8,mlp.up_proj,0.47201,0.01000,0.770
8,mlp.gate_proj,0.75260,0.01000,0.757
8,mlp.down_proj,0.01079,0.01000,3.152
9,self_attn.k_proj,0.24430,0.01000,0.727
9,self_attn.v_proj,0.03725,0.01000,0.680
9,self_attn.q_proj,0.60562,0.01000,0.724
9,self_attn.o_proj,0.00523,0.01000,0.754
9,mlp.up_proj,0.49043,0.01000,0.764
9,mlp.gate_proj,0.80227,0.01000,0.715
9,mlp.down_proj,0.01264,0.01000,3.103
10,self_attn.k_proj,0.30944,0.01000,0.742
10,self_attn.v_proj,0.04692,0.01000,0.668
10,self_attn.q_proj,0.68625,0.01000,0.704
10,self_attn.o_proj,0.00472,0.01000,0.736
10,mlp.up_proj,0.57742,0.01000,0.771
10,mlp.gate_proj,0.90810,0.01000,0.740
10,mlp.down_proj,0.01584,0.01000,3.073
11,self_attn.k_proj,0.36443,0.01000,0.712
11,self_attn.v_proj,0.04592,0.01000,0.695
11,self_attn.q_proj,0.66843,0.01000,0.697
11,self_attn.o_proj,0.00372,0.01000,0.766
11,mlp.up_proj,0.63095,0.01000,0.803
11,mlp.gate_proj,0.97327,0.01000,0.726
11,mlp.down_proj,0.01862,0.01000,3.065
12,self_attn.k_proj,0.33120,0.01000,0.731
12,self_attn.v_proj,0.04817,0.01000,0.694
12,self_attn.q_proj,0.64928,0.01000,0.743
12,self_attn.o_proj,0.00386,0.01000,0.726
12,mlp.up_proj,0.66255,0.01000,0.791
12,mlp.gate_proj,0.97375,0.01000,0.759
12,mlp.down_proj,0.02400,0.01000,3.136
13,self_attn.k_proj,0.36034,0.01000,0.763
13,self_attn.v_proj,0.08636,0.01000,0.717
13,self_attn.q_proj,0.78605,0.01000,0.696
13,self_attn.o_proj,0.00579,0.01000,0.764
13,mlp.up_proj,0.79367,0.01000,0.797
13,mlp.gate_proj,1.07306,0.01000,0.757
13,mlp.down_proj,0.03768,0.01000,3.024
14,self_attn.k_proj,0.35830,0.01000,0.714
14,self_attn.v_proj,0.16440,0.01000,0.697
14,self_attn.q_proj,0.75050,0.01000,0.727
14,self_attn.o_proj,0.01295,0.01000,0.740
14,mlp.up_proj,0.92130,0.01000,0.780
14,mlp.gate_proj,1.33264,0.01000,0.727
14,mlp.down_proj,0.05907,0.01000,3.084
15,self_attn.k_proj,0.35875,0.01000,0.726
15,self_attn.v_proj,0.17096,0.01000,0.703
15,self_attn.q_proj,0.67191,0.01000,0.680
15,self_attn.o_proj,0.03647,0.01000,0.743
15,mlp.up_proj,1.12717,0.01000,0.775
15,mlp.gate_proj,1.49428,0.01000,0.742
15,mlp.down_proj,0.22401,0.01000,2.976
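The log records the GPTQ reconstruction loss per quantized module, and the loss visibly grows with depth (mlp.gate_proj climbs from 0.189 at layer 0 to 1.494 at layer 15, and mlp.down_proj spikes to 0.224 in the last layer), which is the usual GPTQ pattern. A stdlib sketch that averages the loss per module type, assuming the file is saved as quant_log.csv:

    import csv
    from collections import defaultdict

    # Aggregate quant_log.csv: mean GPTQ loss per module type across layers.
    totals = defaultdict(float)
    counts = defaultdict(int)
    with open("quant_log.csv") as f:
        for row in csv.DictReader(f):
            totals[row["module"]] += float(row["loss"])
            counts[row["module"]] += 1

    for module, total in sorted(totals.items(), key=lambda kv: kv[1], reverse=True):
        print(f"{module:20s} mean loss {total / counts[module]:.5f}")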
quantize_config.json
ADDED
@@ -0,0 +1,21 @@
{
  "bits": 4,
  "dynamic": null,
  "group_size": 32,
  "desc_act": false,
  "sym": true,
  "lm_head": false,
  "quant_method": "gptq",
  "checkpoint_format": "gptq",
  "meta": {
    "quantizer": [
      "gptqmodel:1.5.1-dev"
    ],
    "uri": "https://github.com/modelcloud/gptqmodel",
    "damp_percent": 0.01,
    "damp_auto_increment": 0.0025,
    "static_groups": false,
    "true_sequential": true,
    "mse": 0.0
  }
}
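quantize_config.json mirrors the quantization_config block embedded in config.json and is what gptqmodel reads back at load time. A sketch of how a checkpoint like this could be produced, following gptqmodel's documented load/quantize/save flow; the exact API may differ in the 1.5.1-dev build named above, and the calibration list is a placeholder (real runs use a few hundred samples):

    from gptqmodel import GPTQModel, QuantizeConfig

    # Settings mirror quantize_config.json: 4-bit, group size 32, symmetric.
    quant_config = QuantizeConfig(bits=4, group_size=32, desc_act=False, sym=True)

    model = GPTQModel.load("meta-llama/Llama-3.2-1B-Instruct", quant_config)
    model.quantize(["placeholder calibration text ..."])  # placeholder data
    model.save("Llama-3.2-1B-Instruct-gptq-4bit")  # hypothetical output path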