Upload folder using huggingface_hub

- config.json +62 -0
- model.safetensors +3 -0
- quant_log.csv +113 -0
- quantize_config.json +21 -0
config.json
ADDED
@@ -0,0 +1,62 @@
{
  "_attn_implementation_autoset": true,
  "_name_or_path": "/home/azureuser/.cache/huggingface/hub/models--meta-llama--Llama-3.2-1B-Instruct/snapshots/9213176726f574b556790deb65791e0c5aa438b6",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 128000,
  "eos_token_id": [
    128001,
    128008,
    128009
  ],
  "head_dim": 64,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 16,
  "num_key_value_heads": 8,
  "pretraining_tp": 1,
  "quantization_config": {
    "bits": 4,
    "checkpoint_format": "gptq",
    "desc_act": true,
    "dynamic": null,
    "group_size": 32,
    "lm_head": false,
    "meta": {
      "damp_auto_increment": 0.0025,
      "damp_percent": 0.01,
      "mse": 0.0,
      "quantizer": [
        "gptqmodel:1.5.1-dev"
      ],
      "static_groups": false,
      "true_sequential": true,
      "uri": "https://github.com/modelcloud/gptqmodel"
    },
    "quant_method": "gptq",
    "sym": true
  },
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 32.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.47.1",
  "use_cache": true,
  "vocab_size": 128256
}
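
The quantization_config block above is picked up automatically by transformers when the checkpoint is loaded, so no extra quantization arguments are needed. A minimal loading sketch; the repo id below is a placeholder, and a GPTQ kernel backend (e.g. gptqmodel or optimum) must be installed alongside transformers:

from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder id, not the actual repo name.
model_id = "your-namespace/Llama-3.2-1B-Instruct-gptq-4bit"

tokenizer = AutoTokenizer.from_pretrained(model_id)
# quantization_config in config.json tells transformers these are
# 4-bit GPTQ weights; they are unpacked by the installed kernel backend.
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

inputs = tokenizer("Hello, world!", return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=20)[0]))
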
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:85c070d6fc7afa65c11681d789532f1ea79328ad47b0efd3c880af854999cdab
size 1614733472
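
This is a Git LFS pointer file, not the weights themselves; the actual ~1.6 GB safetensors blob is fetched separately and identified by the sha256 oid above. A small stdlib-only sketch for checking that a downloaded file matches that oid:

import hashlib

# Stream the file in chunks so the 1.6 GB blob never sits in memory at once.
def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "85c070d6fc7afa65c11681d789532f1ea79328ad47b0efd3c880af854999cdab"
assert sha256_of("model.safetensors") == expected, "checksum mismatch"
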
quant_log.csv
ADDED
@@ -0,0 +1,113 @@
layer,module,loss,damp,time
0,self_attn.k_proj,0.03242,0.01000,0.906
0,self_attn.v_proj,0.00080,0.01000,0.680
0,self_attn.q_proj,0.06659,0.01000,0.697
0,self_attn.o_proj,0.00007,0.01000,0.746
0,mlp.up_proj,0.05609,0.01000,0.751
0,mlp.gate_proj,0.07055,0.01000,0.817
0,mlp.down_proj,0.00038,0.01000,3.103
1,self_attn.k_proj,0.05143,0.01000,0.746
1,self_attn.v_proj,0.00292,0.01000,0.728
1,self_attn.q_proj,0.09487,0.01000,0.708
1,self_attn.o_proj,0.00013,0.01000,0.733
1,mlp.up_proj,0.07948,0.01000,0.788
1,mlp.gate_proj,0.10903,0.01000,0.859
1,mlp.down_proj,0.28001,0.01000,2.977
2,self_attn.k_proj,0.09702,0.01000,0.750
2,self_attn.v_proj,0.00672,0.01000,0.677
2,self_attn.q_proj,0.18856,0.01000,0.696
2,self_attn.o_proj,0.00022,0.01000,0.725
2,mlp.up_proj,0.09984,0.01000,0.781
2,mlp.gate_proj,0.15930,0.01000,0.803
2,mlp.down_proj,0.00119,0.01000,2.998
3,self_attn.k_proj,0.06446,0.01000,0.734
3,self_attn.v_proj,0.00860,0.01000,0.685
3,self_attn.q_proj,0.14038,0.01000,0.688
3,self_attn.o_proj,0.00043,0.01000,0.744
3,mlp.up_proj,0.12107,0.01000,0.794
3,mlp.gate_proj,0.24166,0.01000,0.799
3,mlp.down_proj,0.00183,0.01000,3.019
4,self_attn.k_proj,0.06840,0.01000,0.755
4,self_attn.v_proj,0.00803,0.01000,0.702
4,self_attn.q_proj,0.14097,0.01000,0.707
4,self_attn.o_proj,0.00079,0.01000,0.731
4,mlp.up_proj,0.12585,0.01000,0.765
4,mlp.gate_proj,0.27268,0.01000,0.796
4,mlp.down_proj,0.00232,0.01000,2.969
5,self_attn.k_proj,0.10500,0.01000,0.766
5,self_attn.v_proj,0.00724,0.01000,0.684
5,self_attn.q_proj,0.18579,0.01000,0.700
5,self_attn.o_proj,0.00092,0.01000,0.754
5,mlp.up_proj,0.13968,0.01000,0.765
5,mlp.gate_proj,0.25845,0.01000,0.796
5,mlp.down_proj,0.00277,0.01000,3.046
6,self_attn.k_proj,0.08859,0.01000,0.727
6,self_attn.v_proj,0.00953,0.01000,0.689
6,self_attn.q_proj,0.14065,0.01000,0.699
6,self_attn.o_proj,0.00129,0.01000,0.735
6,mlp.up_proj,0.14354,0.01000,0.791
6,mlp.gate_proj,0.25938,0.01000,0.785
6,mlp.down_proj,0.00291,0.01000,3.035
7,self_attn.k_proj,0.08833,0.01000,0.741
7,self_attn.v_proj,0.01088,0.01000,0.672
7,self_attn.q_proj,0.16550,0.01000,0.681
7,self_attn.o_proj,0.00129,0.01000,0.746
7,mlp.up_proj,0.15128,0.01000,0.779
7,mlp.gate_proj,0.24538,0.01000,0.803
7,mlp.down_proj,0.00320,0.01000,3.057
8,self_attn.k_proj,0.10553,0.01000,0.749
8,self_attn.v_proj,0.01056,0.01000,0.678
8,self_attn.q_proj,0.17305,0.01000,0.717
8,self_attn.o_proj,0.00191,0.01000,0.732
8,mlp.up_proj,0.17159,0.01000,0.759
8,mlp.gate_proj,0.27065,0.01000,0.804
8,mlp.down_proj,0.00440,0.01000,3.075
9,self_attn.k_proj,0.08821,0.01000,0.751
9,self_attn.v_proj,0.01267,0.01000,0.671
9,self_attn.q_proj,0.21381,0.01000,0.702
9,self_attn.o_proj,0.00240,0.01000,0.738
9,mlp.up_proj,0.18510,0.01000,0.789
9,mlp.gate_proj,0.30127,0.01000,0.789
9,mlp.down_proj,0.00538,0.01000,3.119
10,self_attn.k_proj,0.11026,0.01000,0.753
10,self_attn.v_proj,0.01362,0.01000,0.693
10,self_attn.q_proj,0.22377,0.01000,0.718
10,self_attn.o_proj,0.00176,0.01000,0.748
10,mlp.up_proj,0.22299,0.01000,0.774
10,mlp.gate_proj,0.35132,0.01000,0.820
10,mlp.down_proj,0.00690,0.01000,3.018
11,self_attn.k_proj,0.13077,0.01000,0.753
11,self_attn.v_proj,0.01364,0.01000,0.718
11,self_attn.q_proj,0.21940,0.01000,0.718
11,self_attn.o_proj,0.00149,0.01000,0.758
11,mlp.up_proj,0.25063,0.01000,0.773
11,mlp.gate_proj,0.38616,0.01000,0.799
11,mlp.down_proj,0.00782,0.01000,2.983
12,self_attn.k_proj,0.12650,0.01000,0.742
12,self_attn.v_proj,0.01431,0.01000,0.666
12,self_attn.q_proj,0.21842,0.01000,0.733
12,self_attn.o_proj,0.00136,0.01000,0.730
12,mlp.up_proj,0.27024,0.01000,0.783
12,mlp.gate_proj,0.39223,0.01000,0.795
12,mlp.down_proj,0.00925,0.01000,2.858
13,self_attn.k_proj,0.12914,0.01000,0.687
13,self_attn.v_proj,0.02417,0.01000,0.633
13,self_attn.q_proj,0.25690,0.01000,0.634
13,self_attn.o_proj,0.00204,0.01000,0.717
13,mlp.up_proj,0.31966,0.01000,0.770
13,mlp.gate_proj,0.42314,0.01000,0.836
13,mlp.down_proj,0.01332,0.01000,2.869
14,self_attn.k_proj,0.13651,0.01000,0.691
14,self_attn.v_proj,0.05146,0.01000,0.625
14,self_attn.q_proj,0.26610,0.01000,0.670
14,self_attn.o_proj,0.00420,0.01000,0.708
14,mlp.up_proj,0.36466,0.01000,0.754
14,mlp.gate_proj,0.52044,0.01000,0.870
14,mlp.down_proj,0.01716,0.01000,2.821
15,self_attn.k_proj,0.12916,0.01000,0.698
15,self_attn.v_proj,0.05582,0.01000,0.641
15,self_attn.q_proj,0.24866,0.01000,0.646
15,self_attn.o_proj,0.01490,0.01000,0.705
15,mlp.up_proj,0.43859,0.01000,0.785
15,mlp.gate_proj,0.58076,0.01000,0.905
15,mlp.down_proj,0.03817,0.01000,2.872
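
The log records one row per quantized module: layer index, module name, quantization loss, the damping factor used, and wall-clock time in seconds. Loss generally grows toward later layers (compare mlp.gate_proj at 0.07055 in layer 0 vs. 0.58076 in layer 15), which is typical for sequential GPTQ since error compounds layer by layer. A quick stdlib sketch for summarizing the log per layer:

import csv
from collections import defaultdict

# Sum quantization loss and time per transformer layer.
per_layer = defaultdict(lambda: {"loss": 0.0, "time": 0.0})
with open("quant_log.csv", newline="") as f:
    for row in csv.DictReader(f):
        per_layer[int(row["layer"])]["loss"] += float(row["loss"])
        per_layer[int(row["layer"])]["time"] += float(row["time"])

for layer, stats in sorted(per_layer.items()):
    print(f"layer {layer:2d}: total loss {stats['loss']:.5f}, {stats['time']:.1f}s")
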
quantize_config.json
ADDED
@@ -0,0 +1,21 @@
{
  "bits": 4,
  "dynamic": null,
  "group_size": 32,
  "desc_act": true,
  "sym": true,
  "lm_head": false,
  "quant_method": "gptq",
  "checkpoint_format": "gptq",
  "meta": {
    "quantizer": [
      "gptqmodel:1.5.1-dev"
    ],
    "uri": "https://github.com/modelcloud/gptqmodel",
    "damp_percent": 0.01,
    "damp_auto_increment": 0.0025,
    "static_groups": false,
    "true_sequential": true,
    "mse": 0.0
  }
}
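
This file mirrors the quantization_config embedded in config.json and is what gptqmodel writes at save time. A sketch of how a checkpoint with these settings would be produced, following the gptqmodel README at the uri above; exact keyword names may differ across versions, and the calibration data here is a stand-in:

from gptqmodel import GPTQModel, QuantizeConfig

# Settings matching this quantize_config.json.
quant_config = QuantizeConfig(bits=4, group_size=32, desc_act=True, sym=True)

model = GPTQModel.load("meta-llama/Llama-3.2-1B-Instruct", quant_config)

# Stand-in calibration set: in practice, a few hundred representative text
# samples are used to estimate activation statistics during GPTQ.
calibration_dataset = ["The quick brown fox jumps over the lazy dog."] * 256
model.quantize(calibration_dataset)

model.save("Llama-3.2-1B-Instruct-gptq-4bit-32g")
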