Granther committed
Commit 2993466 · verified · 1 Parent(s): 9ad6f78

Upload Phi3ForCausalLM

Files changed (4)
  1. README.md +2 -2
  2. config.json +164 -0
  3. generation_config.json +11 -0
  4. model.safetensors +3 -0
README.md CHANGED
@@ -1,8 +1,8 @@
 ---
 language:
 - en
+license: mit
 pipeline_tag: text-generation
 tags:
 - gptq
-license: mit
----
+---
 
config.json ADDED
@@ -0,0 +1,164 @@
+{
+  "_name_or_path": "phi3_quantized_4bit",
+  "architectures": [
+    "Phi3ForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "microsoft/Phi-3-mini-128k-instruct--configuration_phi3.Phi3Config",
+    "AutoModelForCausalLM": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForCausalLM",
+    "AutoModelForSequenceClassification": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForSequenceClassification",
+    "AutoModelForTokenClassification": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForTokenClassification"
+  },
+  "bos_token_id": 1,
+  "embd_pdrop": 0.0,
+  "eos_token_id": 32000,
+  "hidden_act": "silu",
+  "hidden_size": 3072,
+  "initializer_range": 0.02,
+  "intermediate_size": 8192,
+  "max_position_embeddings": 131072,
+  "model_type": "phi3",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 32,
+  "original_max_position_embeddings": 4096,
+  "pad_token_id": 32000,
+  "quantization_config": {
+    "batch_size": 1,
+    "bits": 4,
+    "block_name_to_quantize": null,
+    "cache_block_outputs": true,
+    "damp_percent": 0.1,
+    "dataset": "c4",
+    "desc_act": false,
+    "exllama_config": {
+      "version": 1
+    },
+    "group_size": 128,
+    "max_input_length": null,
+    "model_seqlen": null,
+    "module_name_preceding_first_block": null,
+    "modules_in_block_to_quantize": null,
+    "pad_token_id": null,
+    "quant_method": "gptq",
+    "sym": true,
+    "tokenizer": null,
+    "true_sequential": true,
+    "use_cuda_fp16": false,
+    "use_exllama": true
+  },
+  "resid_pdrop": 0.0,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "long_factor": [
+      1.0299999713897705,
+      1.0499999523162842,
+      1.0499999523162842,
+      1.0799999237060547,
+      1.2299998998641968,
+      1.2299998998641968,
+      1.2999999523162842,
+      1.4499999284744263,
+      1.5999999046325684,
+      1.6499998569488525,
+      1.8999998569488525,
+      2.859999895095825,
+      3.68999981880188,
+      5.419999599456787,
+      5.489999771118164,
+      5.489999771118164,
+      9.09000015258789,
+      11.579999923706055,
+      15.65999984741211,
+      15.769999504089355,
+      15.789999961853027,
+      18.360000610351562,
+      21.989999771118164,
+      23.079999923706055,
+      30.009998321533203,
+      32.35000228881836,
+      32.590003967285156,
+      35.56000518798828,
+      39.95000457763672,
+      53.840003967285156,
+      56.20000457763672,
+      57.95000457763672,
+      59.29000473022461,
+      59.77000427246094,
+      59.920005798339844,
+      61.190006256103516,
+      61.96000671386719,
+      62.50000762939453,
+      63.3700065612793,
+      63.48000717163086,
+      63.48000717163086,
+      63.66000747680664,
+      63.850006103515625,
+      64.08000946044922,
+      64.760009765625,
+      64.80001068115234,
+      64.81001281738281,
+      64.81001281738281
+    ],
+    "short_factor": [
+      1.05,
+      1.05,
+      1.05,
+      1.1,
+      1.1,
+      1.1500000000000001,
+      1.2000000000000002,
+      1.2500000000000002,
+      1.3000000000000003,
+      1.3500000000000003,
+      1.5000000000000004,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.000000000000001,
+      2.0500000000000007,
+      2.0500000000000007,
+      2.0500000000000007,
+      2.1000000000000005,
+      2.1000000000000005,
+      2.1000000000000005,
+      2.1500000000000004,
+      2.1500000000000004,
+      2.3499999999999996,
+      2.549999999999999,
+      2.5999999999999988,
+      2.5999999999999988,
+      2.7499999999999982,
+      2.849999999999998,
+      2.849999999999998,
+      2.9499999999999975
+    ],
+    "type": "su"
+  },
+  "rope_theta": 10000.0,
+  "sliding_window": 262144,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.43.0.dev0",
+  "use_cache": true,
+  "vocab_size": 32064
+}
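
Because the `quantization_config` above is embedded in config.json, a recent transformers build (with the optimum and auto-gptq backends installed) picks up the GPTQ settings on its own. Below is a minimal loading sketch; the repo id is hypothetical and stands in for wherever this checkpoint is hosted, and a CUDA machine is assumed since `use_exllama` is enabled:

```python
# Minimal loading sketch. The repo id is hypothetical; substitute the actual
# repository path. Requires: transformers, optimum, auto-gptq, a CUDA GPU.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "Granther/phi3-gptq-4bit"  # hypothetical id, for illustration only

# trust_remote_code is needed because "auto_map" in config.json points at
# Phi-3's custom modeling code; the embedded quantization_config tells
# transformers to load the 4-bit GPTQ weights with the ExLlama kernels
# declared above.
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    device_map="auto",
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
```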
generation_config.json ADDED
@@ -0,0 +1,11 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": [
+    32000,
+    32001,
+    32007
+  ],
+  "pad_token_id": 32000,
+  "transformers_version": "4.43.0.dev0"
+}
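
The three `eos_token_id` values let generation stop on any of Phi-3's special end markers rather than only the base end-of-text token. A usage sketch, assuming the `model` and `tokenizer` objects from the loading sketch above and that the tokenizer ships Phi-3's chat template:

```python
# Generation sketch; `model` and `tokenizer` come from the loading sketch.
messages = [{"role": "user", "content": "Explain GPTQ in one sentence."}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

output = model.generate(
    input_ids,
    max_new_tokens=64,
    eos_token_id=[32000, 32001, 32007],  # mirrors generation_config.json
    pad_token_id=32000,                  # mirrors generation_config.json
)
# Decode only the newly generated tokens, not the echoed prompt.
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))
```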
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e0c7fafbd53869f620036f74e7dcfc732f8b86766199994e900b574771e7c569
+size 2279413824
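
The weights themselves live in Git LFS; only this pointer is committed, recording the file's sha256 and byte size. Those two fields are enough to verify a download, as in the sketch below (the local path is an assumption):

```python
# Verify a downloaded model.safetensors against the LFS pointer above.
import hashlib
import os

path = "model.safetensors"  # assumed local path; adjust to your download
expected_oid = "e0c7fafbd53869f620036f74e7dcfc732f8b86766199994e900b574771e7c569"
expected_size = 2279413824

assert os.path.getsize(path) == expected_size, "size mismatch"

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        digest.update(chunk)
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```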