Bingsu committed on
Commit f9eb59b · verified · 1 Parent(s): bfbd307

Add files using upload-large-folder tool

.gitattributes CHANGED
@@ -33,3 +33,17 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q4_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q3_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q3_K_S.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q3_K_L.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q8_0.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q2_K.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q6_K.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q5_0.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-BF16.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q4_0.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q4_K_S.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q5_K_S.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-Q5_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+ Midm-2.0-Mini-Instruct-F32.gguf filter=lfs diff=lfs merge=lfs -text
LICENSE.txt ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 KT Corporation
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
Midm-2.0-Mini-Instruct-BF16.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ca72406eb3f408b1af3ced70f78594dc200dfeaf9a07cab59bf60b220638d0c
+ size 4617053184
Midm-2.0-Mini-Instruct-F32.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fb9bf81231ea9e0be40304befd2336134708990845283c055fac75642fb4d75f
+ size 9227740160
Midm-2.0-Mini-Instruct-Q2_K.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d028bf4732347b0271d838f73d19d607853fa9a1dd66e49bb59564d2b340e08
+ size 975376768
Midm-2.0-Mini-Instruct-Q3_K_L.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a5de5eaa599e5072f8dc99d9c4a1d2c0ea1c56800a982a367eaec642ae097537
+ size 1304646016
Midm-2.0-Mini-Instruct-Q3_K_M.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:96b1c928ba18712405dd0cc148b35e9b0f2bb64fea5b398e5ba31a9b0fda1042
+ size 1203605888
Midm-2.0-Mini-Instruct-Q3_K_S.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1da4a8effb6b7acea5d6fe64b24821bdc152469b261ece68c415fc5da018b4f4
+ size 1088917888
Midm-2.0-Mini-Instruct-Q4_0.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f06814adff4847c5b46df480b03e3b896a2052918cd3cf1dc83282cd68bada1a
+ size 1363825024
Midm-2.0-Mini-Instruct-Q4_K_M.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a945a20ceae202ab7a96d461fa9c3710e62deaffd6654c2106ab5e813fd8225
+ size 1426272640
Midm-2.0-Mini-Instruct-Q4_K_S.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0f3f147f6538eed02f090334a708b394a6361269d1e033949731c85385126dff
+ size 1370935680
Midm-2.0-Mini-Instruct-Q5_0.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:525792fb9cc7f4db362eaa9bdc3282bc7fe31f27f22f54630e13d7f9ff4c76e3
+ size 1622561152
Midm-2.0-Mini-Instruct-Q5_K_M.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f53ac2a8ef50b8626737919361b417809fd4e334703745d5079d62a64da0a9a8
+ size 1654731136
Midm-2.0-Mini-Instruct-Q5_K_S.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:15153213f84c01aa7f402f5efac79dbf1bba819d6f1ce978871c966c9b680287
+ size 1622561152
Midm-2.0-Mini-Instruct-Q6_K.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6e86b6e1bc8464c07f41217cfb4177f22172668f2f75374d2cbec6b9863b365
+ size 1897468288
Midm-2.0-Mini-Instruct-Q8_0.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00774ed1a314af5dcc034dfbfea88c9d0e941236c5282b5a1731b0793bf64f88
+ size 2455793664
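
Each pointer above records the artifact's `oid sha256` and `size`. A minimal Python sketch for checking a downloaded GGUF against those values; the local path and expected values are taken from the BF16 pointer above, so swap them for whichever quantization you actually downloaded:

```python
import hashlib
import os

# Expected values copied from the BF16 LFS pointer above; replace with the
# hash/size of the quantization you downloaded.
path = "Midm-2.0-Mini-Instruct-BF16.gguf"
expected_sha256 = "4ca72406eb3f408b1af3ced70f78594dc200dfeaf9a07cab59bf60b220638d0c"
expected_size = 4617053184

digest = hashlib.sha256()
with open(path, "rb") as f:
    # Stream in 1 MiB chunks so multi-GB files do not need to fit in memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

print("size matches:  ", os.path.getsize(path) == expected_size)
print("sha256 matches:", digest.hexdigest() == expected_sha256)
```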
README.md ADDED
@@ -0,0 +1,572 @@
+ ---
+ license: mit
+ language:
+ - en
+ - ko
+ tags:
+ - KT
+ - K-intelligence
+ - Mi:dm
+ pipeline_tag: text-generation
+ library_name: transformers
+ ---
+
+ <p align="center">
+ <br>
+ <span style="font-size: 60px; font-weight: bold;">Mi:dm 2.0 Mini</span>
+ <br>
+ </p>
+ <p align="center">
+ 🤗 <a href="https://huggingface.co/collections/K-intelligence/mi-dm-20-6866406c301e5f45a6926af8">Mi:dm 2.0 Models</a> |
+ 📜 <a href="https://github.com/K-intelligence-Midm/Midm-2.0/blob/main/Mi_dm-2_0_technical_report.pdf">Mi:dm 2.0 Technical Report</a> |
+ 📕 Mi:dm 2.0 Technical Blog*
+ </p>
+
+ <p align="center"><sub>*To be released soon</sub></p>
+
+ <br>
+
+ ## News 📢
+
+ - 🔜 _(Coming soon!) GGUF-format model files will be released for easier local deployment._
+ - ⚡️ `2025/07/04`: Released the Mi:dm 2.0 model collection on Hugging Face 🤗.
+ <br>
+ <br>
+ # Table of Contents
+
+ - ___Overview___
+   - [Mi:dm 2.0](#midm-20)
+   - [Quickstart](#quickstart)
+   - [Evaluation](#evaluation)
+ - ___Usage___
+   - [Run on Friendli.AI](#run-on-friendliai)
+   - [Run on Your Local Machine](#run-on-your-local-machine)
+   - [Deployment](#deployment)
+   - [Tutorials](#tutorials)
+ - ___More Information___
+   - [Limitation](#limitation)
+   - [License](#license)
+   - [Contact](#contact)
+
+ <br>
+ <br>
+
+ # Overview
+
+ ### Mi:dm 2.0
+
+ **Mi:dm 2.0** is a __"Korea-centric AI"__ model developed using KT's proprietary technology. The term __"Korea-centric AI"__ refers to a model that deeply internalizes the unique values, cognitive frameworks, and commonsense reasoning inherent to Korean society. It goes beyond simply processing or generating Korean text: it reflects a deeper understanding of the socio-cultural norms and values that define Korean society.
+
+ Mi:dm 2.0 is released in two versions:
+
+ - **Mi:dm 2.0 Base**
+   An 11.5B-parameter dense model designed to balance model size and performance.
+   It extends an 8B-scale model by applying the Depth-up Scaling (DuS) method, making it suitable for real-world applications that require both performance and versatility.
+
+ - **Mi:dm 2.0 Mini**
+   A lightweight 2.3B-parameter dense model optimized for on-device environments and systems with limited GPU resources.
+   It was derived from the Base model through pruning and distillation to enable compact deployment.
+
+ > [!NOTE]
+ > Neither the pre-training nor the post-training data includes KT users' data.
+
+ <br>
+
+ ### Quickstart
+
+ Here is a code snippet to run conversational inference with the model:
+
+ ```python
+ import torch
+ from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig
+
+ model_name = "K-intelligence/Midm-2.0-Mini-Instruct"
+
+ model = AutoModelForCausalLM.from_pretrained(
+     model_name,
+     torch_dtype=torch.bfloat16,
+     trust_remote_code=True,
+     device_map="auto"
+ )
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ generation_config = GenerationConfig.from_pretrained(model_name)
+
+ prompt = "KT에 대해 소개해줘"  # "Tell me about KT."
+
+ # messages for inference
+ messages = [
+     {"role": "system",
+      "content": "Mi:dm(믿:음)은 KT에서 개발한 AI 기반 어시스턴트이다."},  # "Mi:dm is an AI-based assistant developed by KT."
+     {"role": "user", "content": prompt}
+ ]
+
+ input_ids = tokenizer.apply_chat_template(
+     messages,
+     tokenize=True,
+     add_generation_prompt=True,
+     return_tensors="pt"
+ )
+
+ output = model.generate(
+     input_ids.to("cuda"),
+     generation_config=generation_config,
+     eos_token_id=tokenizer.eos_token_id,
+     max_new_tokens=128,
+     do_sample=False,
+ )
+ print(tokenizer.decode(output[0]))
+ ```
+
+ > [!NOTE]
+ > The `transformers` library should be version `4.45.0` or higher.
+
+ <br>
+ <br>
+
+ # Evaluation
+
+ #### Korean
+
+ <!-- first half table -->
+ <table>
+   <tr>
+     <th rowspan="2">Model</th>
+     <th colspan="5" align="center">Society & Culture</th>
+     <th colspan="3" align="center">General Knowledge</th>
+     <th colspan="3" align="center">Instruction Following</th>
+   </tr>
+   <tr>
+     <th align="center">K-Refer<sup>*</sup></th>
+     <th align="center">K-Refer-Hard<sup>*</sup></th>
+     <th align="center">Ko-Sovereign<sup>*</sup></th>
+     <th align="center">HAERAE</th>
+     <th align="center">Avg.</th>
+     <th align="center">KMMLU</th>
+     <th align="center">Ko-Sovereign<sup>*</sup></th>
+     <th align="center">Avg.</th>
+     <th align="center">Ko-IFEval</th>
+     <th align="center">Ko-MTBench</th>
+     <th align="center">Avg.</th>
+   </tr>
+   <!-- Small models -->
+   <tr><td><strong>Qwen3-4B</strong></td><td align="center">53.6</td><td align="center">42.9</td><td align="center">35.8</td><td align="center">50.6</td><td align="center">45.7</td><td align="center"><strong>50.6</strong></td><td align="center"><strong>42.5</strong></td><td align="center"><strong>46.5</strong></td><td align="center"><strong>75.9</strong></td><td align="center">63.0</td><td align="center">69.4</td></tr>
+   <tr><td><strong>Exaone-3.5-2.4B-inst</strong></td><td align="center">64.0</td><td align="center"><strong>67.1</strong></td><td align="center"><strong>44.4</strong></td><td align="center">61.3</td><td align="center"><strong>59.2</strong></td><td align="center">43.5</td><td align="center">42.4</td><td align="center">43.0</td><td align="center">65.4</td><td align="center"><strong>74.0</strong></td><td align="center">68.9</td></tr>
+   <tr><td><strong>Mi:dm 2.0-Mini-inst</strong></td><td align="center"><strong>66.4</strong></td><td align="center">61.4</td><td align="center">36.7</td><td align="center"><strong>70.8</strong></td><td align="center">58.8</td><td align="center">45.1</td><td align="center">42.4</td><td align="center">43.8</td><td align="center">73.3</td><td align="center"><strong>74.0</strong></td><td align="center"><strong>73.6</strong></td></tr>
+   <!-- Spacer row -->
+   <tr><td colspan="12"> </td></tr>
+   <!-- Large models -->
+   <tr><td><strong>Qwen3-14B</strong></td><td align="center">72.4</td><td align="center">65.7</td><td align="center">49.8</td><td align="center">68.4</td><td align="center">64.1</td><td align="center">55.4</td><td align="center">54.7</td><td align="center">55.1</td><td align="center"><strong>83.6</strong></td><td align="center">71</td><td align="center">77.3</td></tr>
+   <tr><td><strong>Llama-3.1-8B-inst</strong></td><td align="center">43.2</td><td align="center">36.4</td><td align="center">33.8</td><td align="center">49.5</td><td align="center">40.7</td><td align="center">33.0</td><td align="center">36.7</td><td align="center">34.8</td><td align="center">60.1</td><td align="center">57</td><td align="center">58.5</td></tr>
+   <tr><td><strong>Exaone-3.5-7.8B-inst</strong></td><td align="center">71.6</td><td align="center">69.3</td><td align="center">46.9</td><td align="center">72.9</td><td align="center">65.2</td><td align="center">52.6</td><td align="center">45.6</td><td align="center">49.1</td><td align="center">69.1</td><td align="center">79.6</td><td align="center">74.4</td></tr>
+   <tr><td><strong>Mi:dm 2.0-Base-inst</strong></td><td align="center"><strong>89.6</strong></td><td align="center"><strong>86.4</strong></td><td align="center"><strong>56.3</strong></td><td align="center"><strong>81.5</strong></td><td align="center"><strong>78.4</strong></td><td align="center"><strong>57.3</strong></td><td align="center"><strong>58.0</strong></td><td align="center"><strong>57.7</strong></td><td align="center">82</td><td align="center"><strong>89.7</strong></td><td align="center"><strong>85.9</strong></td></tr>
+ </table>
+
+ <!-- second half table -->
+ <table>
+   <tr>
+     <th rowspan="2" align="center">Model</th>
+     <th colspan="5" align="center">Comprehension</th>
+     <th colspan="5" align="center">Reasoning</th>
+   </tr>
+   <tr>
+     <th align="center">K-Prag<sup>*</sup></th>
+     <th align="center">K-Refer-Hard<sup>*</sup></th>
+     <th align="center">Ko-Best</th>
+     <th align="center">Ko-Sovereign<sup>*</sup></th>
+     <th align="center">Avg.</th>
+     <th align="center">Ko-Winogrande</th>
+     <th align="center">Ko-Best</th>
+     <th align="center">LogicKor</th>
+     <th align="center">HRM8K</th>
+     <th align="center">Avg.</th>
+   </tr>
+   <!-- Small models -->
+   <tr><td><strong>Qwen3-4B</strong></td><td align="center"><strong>73.9</strong></td><td align="center">56.7</td><td align="center"><strong>91.5</strong></td><td align="center"><strong>43.5</strong></td><td align="center"><strong>66.6</strong></td><td align="center"><strong>67.5</strong></td><td align="center"><strong>69.2</strong></td><td align="center">5.6</td><td align="center"><strong>56.7</strong></td><td align="center"><strong>43.8</strong></td></tr>
+   <tr><td><strong>Exaone-3.5-2.4B-inst</strong></td><td align="center">68.7</td><td align="center"><strong>58.5</strong></td><td align="center">87.2</td><td align="center">38.0</td><td align="center">62.5</td><td align="center">60.3</td><td align="center">64.1</td><td align="center">7.4</td><td align="center">38.5</td><td align="center">36.7</td></tr>
+   <tr><td><strong>Mi:dm 2.0-Mini-inst</strong></td><td align="center">69.5</td><td align="center">55.4</td><td align="center">80.5</td><td align="center">42.5</td><td align="center">61.9</td><td align="center">61.7</td><td align="center">64.5</td><td align="center"><strong>7.7</strong></td><td align="center">39.9</td><td align="center">37.4</td></tr>
+   <!-- Spacer row -->
+   <tr><td colspan="11"> </td></tr>
+   <!-- Large models -->
+   <tr><td><strong>Qwen3-14B</strong></td><td align="center"><strong>86.7</strong></td><td align="center"><strong>74.0</strong></td><td align="center">93.9</td><td align="center">52.0</td><td align="center"><strong>76.8</strong></td><td align="center"><strong>77.2</strong></td><td align="center"><strong>75.4</strong></td><td align="center">6.4</td><td align="center"><strong>64.5</strong></td><td align="center"><strong>48.8</strong></td></tr>
+   <tr><td><strong>Llama-3.1-8B-inst</strong></td><td align="center">59.9</td><td align="center">48.6</td><td align="center">77.4</td><td align="center">31.5</td><td align="center">51.5</td><td align="center">40.1</td><td align="center">26.0</td><td align="center">2.4</td><td align="center">30.9</td><td align="center">19.8</td></tr>
+   <tr><td><strong>Exaone-3.5-7.8B-inst</strong></td><td align="center">73.5</td><td align="center">61.9</td><td align="center">92.0</td><td align="center">44.0</td><td align="center">67.2</td><td align="center">64.6</td><td align="center">60.3</td><td align="center"><strong>8.6</strong></td><td align="center">49.7</td><td align="center">39.5</td></tr>
+   <tr><td><strong>Mi:dm 2.0-Base-inst</strong></td><td align="center">86.5</td><td align="center">70.8</td><td align="center"><strong>95.2</strong></td><td align="center"><strong>53.0</strong></td><td align="center">76.1</td><td align="center">75.1</td><td align="center">73.0</td><td align="center"><strong>8.6</strong></td><td align="center">52.9</td><td align="center">44.8</td></tr>
+ </table>
+
+ `*` indicates KT proprietary evaluation resources.
+
+ <br>
+
+ #### English
+
+ <table>
+   <tr>
+     <th rowspan="2" align="center">Model</th>
+     <th align="center">Instruction</th>
+     <th colspan="4" align="center">Reasoning</th>
+     <th align="center">Math</th>
+     <th align="center">Coding</th>
+     <th colspan="3" align="center">General Knowledge</th>
+   </tr>
+   <tr>
+     <th align="center">IFEval</th>
+     <th align="center">BBH</th>
+     <th align="center">GPQA</th>
+     <th align="center">MuSR</th>
+     <th align="center">Avg.</th>
+     <th align="center">GSM8K</th>
+     <th align="center">MBPP+</th>
+     <th align="center">MMLU-pro</th>
+     <th align="center">MMLU</th>
+     <th align="center">Avg.</th>
+   </tr>
+   <!-- Small models -->
+   <tr><td><strong>Qwen3-4B</strong></td><td align="center">79.7</td><td align="center"><strong>79.0</strong></td><td align="center"><strong>39.8</strong></td><td align="center"><strong>58.5</strong></td><td align="center"><strong>59.1</strong></td><td align="center"><strong>90.4</strong></td><td align="center">62.4</td><td align="center">-</td><td align="center"><strong>73.3</strong></td><td align="center"><strong>73.3</strong></td></tr>
+   <tr><td><strong>Exaone-3.5-2.4B-inst</strong></td><td align="center"><strong>81.1</strong></td><td align="center">46.4</td><td align="center">28.1</td><td align="center">49.7</td><td align="center">41.4</td><td align="center">82.5</td><td align="center">59.8</td><td align="center">-</td><td align="center">59.5</td><td align="center">59.5</td></tr>
+   <tr><td><strong>Mi:dm 2.0-Mini-inst</strong></td><td align="center">73.6</td><td align="center">44.5</td><td align="center">26.6</td><td align="center">51.7</td><td align="center">40.9</td><td align="center">83.1</td><td align="center"><strong>60.9</strong></td><td align="center">-</td><td align="center">56.5</td><td align="center">56.5</td></tr>
+   <!-- Spacer row -->
+   <tr><td colspan="11">&nbsp;</td></tr>
+   <!-- Large models -->
+   <tr><td><strong>Qwen3-14B</strong></td><td align="center">83.9</td><td align="center"><strong>83.4</strong></td><td align="center"><strong>49.8</strong></td><td align="center"><strong>57.7</strong></td><td align="center"><strong>63.6</strong></td><td align="center">88.0</td><td align="center">73.4</td><td align="center"><strong>70.5</strong></td><td align="center"><strong>82.7</strong></td><td align="center"><strong>76.6</strong></td></tr>
+   <tr><td><strong>Llama-3.1-8B-inst</strong></td><td align="center">79.9</td><td align="center">60.3</td><td align="center">21.6</td><td align="center">50.3</td><td align="center">44.1</td><td align="center">81.2</td><td align="center"><strong>81.8</strong></td><td align="center">47.6</td><td align="center">70.7</td><td align="center">59.2</td></tr>
+   <tr><td><strong>Exaone-3.5-7.8B-inst</strong></td><td align="center">83.6</td><td align="center">50.1</td><td align="center">33.1</td><td align="center">51.2</td><td align="center">44.8</td><td align="center">81.1</td><td align="center">79.4</td><td align="center">40.7</td><td align="center">69.0</td><td align="center">54.8</td></tr>
+   <tr><td><strong>Mi:dm 2.0-Base-inst</strong></td><td align="center"><strong>84.0</strong></td><td align="center">77.7</td><td align="center">33.5</td><td align="center">51.9</td><td align="center">54.4</td><td align="center"><strong>91.6</strong></td><td align="center">77.5</td><td align="center">53.3</td><td align="center">73.7</td><td align="center">63.5</td></tr>
+ </table>
+
+ <br>
+
+ # Usage
+
+ ### Run on Friendli.AI
+ You can try our model immediately via `Friendli.AI`. Simply click `Deploy` and then `Friendli Endpoints`.
+
+ > [!NOTE]
+ > Please note that a login to `Friendli.AI` is required after your fifth chat interaction.
+
+ <p>
+ <img src="./assets/image_1.png" alt="Left Image" width="36%" style="display:inline-block; margin-right:2%">
+ <img src="./assets/image_2.png" alt="Right Image" width="36%" style="display:inline-block">
+ </p>
+
+ ### Run on Your Local Machine
+ We provide detailed instructions for running Mi:dm 2.0 on your local machine with llama.cpp, LM Studio, and Ollama; please check our [github](https://github.com/K-intelligence-Midm/Midm-2.0) for more information. A minimal local-inference sketch is shown below.
+
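+ GGUF quantizations of Mi:dm 2.0 Mini can be run without a GPU serving stack. A minimal sketch using `huggingface_hub` and `llama-cpp-python`; the repository id, file name, and context size here are illustrative assumptions, so substitute the GGUF repository and quantization you actually downloaded:
+
+ ```python
+ from huggingface_hub import hf_hub_download
+ from llama_cpp import Llama  # pip install llama-cpp-python
+
+ # Assumed repo id / file name for illustration only; point these at the
+ # GGUF repository and quantization you actually want to run.
+ gguf_path = hf_hub_download(
+     repo_id="K-intelligence/Midm-2.0-Mini-Instruct",
+     filename="Midm-2.0-Mini-Instruct-Q4_K_M.gguf",
+ )
+
+ # n_ctx sets the context window; 4096 is a conservative default for this sketch.
+ llm = Llama(model_path=gguf_path, n_ctx=4096)
+
+ response = llm.create_chat_completion(
+     messages=[
+         {"role": "system", "content": "Mi:dm is an AI assistant developed by KT."},
+         {"role": "user", "content": "Tell me about KT."},
+     ],
+     max_tokens=128,
+ )
+ print(response["choices"][0]["message"]["content"])
+ ```
+
+ LM Studio and Ollama can load the same GGUF file directly; the [github](https://github.com/K-intelligence-Midm/Midm-2.0) guide above covers those flows.
+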
+ ### Deployment
+
+ To serve Mi:dm 2.0 using [vLLM](https://github.com/vllm-project/vllm) (`>=0.8.0`) with an OpenAI-compatible API:
+ ```bash
+ vllm serve K-intelligence/Midm-2.0-Mini-Instruct
+ ```
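+
+ Once the server is up, it can be queried with any OpenAI-compatible client. A minimal sketch with the `openai` Python package; the base URL assumes vLLM's default of port 8000 on localhost, so adjust it to your deployment:
+
+ ```python
+ from openai import OpenAI  # pip install openai
+
+ # A placeholder API key is fine for a locally started server without auth configured.
+ client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")
+
+ completion = client.chat.completions.create(
+     model="K-intelligence/Midm-2.0-Mini-Instruct",
+     messages=[
+         {"role": "system", "content": "Mi:dm is an AI assistant developed by KT."},
+         {"role": "user", "content": "Tell me about KT."},
+     ],
+     max_tokens=128,
+ )
+ print(completion.choices[0].message.content)
+ ```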
+
+ ### Tutorials
+ To help our end users easily use Mi:dm 2.0, we provide comprehensive tutorials on [github](https://github.com/K-intelligence-Midm/Midm-2.0).
+
+ <br>
+ <br>
+
+ # More Information
+
+ ### Limitation
+ * The training data for both Mi:dm 2.0 models consists primarily of English and Korean. Understanding and generation in other languages are not guaranteed.
+
+ * The model is not guaranteed to provide reliable advice in fields that require professional expertise, such as law, medicine, or finance.
+
+ * Researchers made efforts to exclude unethical content from the training data, such as profanity, slurs, bias, and discriminatory language. Nevertheless, the model may still produce inappropriate expressions or factual inaccuracies.
+
+ ### License
+
+ Mi:dm 2.0 is licensed under the [MIT License](./LICENSE).
+
+ <!-- ### Citation
+
+ ```
+ @misc{,
+   title={},
+   author={},
+   year={2025},
+   eprint={},
+   archivePrefix={arXiv},
+   primaryClass={cs.CL},
+   url={},
+ }
+ ``` -->
+
+ ### Contact
+ Mi:dm 2.0 Technical Inquiries: [email protected]
+
+ <br>