openfree committed (verified)
Commit b8ba0ff · 1 Parent(s): 5265989

Update app-backup.py

Files changed (1):
  1. app-backup.py +47 -74
app-backup.py CHANGED
@@ -8,9 +8,7 @@ import re
 import uuid
 import pymupdf
 
-# =======================================================
-# magic-pdf & MinerU installation (original code, shown as-is)
-# =======================================================
+# (existing magic-pdf install & configuration logic)
 os.system('pip uninstall -y magic-pdf')
 os.system('pip install git+https://github.com/opendatalab/MinerU.git@dev')
 os.system('wget https://github.com/opendatalab/MinerU/raw/dev/scripts/download_models_hf.py -O download_models_hf.py')
@@ -29,6 +27,9 @@ with open('/home/user/magic-pdf.json', 'w') as file:
 
 os.system('cp -r paddleocr /home/user/.paddleocr')
 
+###############################
+# Gradio and other libraries
+###############################
 import gradio as gr
 from loguru import logger
 
@@ -36,14 +37,8 @@ from magic_pdf.data.data_reader_writer import FileBasedDataReader
 from magic_pdf.libs.hash_utils import compute_sha256
 from magic_pdf.tools.common import do_parse, prepare_env
 
-
-###########################################
-# 1) UI styling (CSS) + PDF-processing functions
-###########################################
 def create_css():
-    """
-    Fill the screen and allow scrolling
-    """
+    """Use the full screen + allow scrolling"""
     return """
     .gradio-container {
         width: 100vw !important;
@@ -152,10 +147,7 @@ def to_pdf(file_path):
     return tmp_file_path
 
 def to_markdown(file_path, end_pages, is_ocr, layout_mode, formula_enable, table_enable, language, progress=gr.Progress(track_tqdm=False)):
-    """
-    - Updates progress via 'progress(...)' during the PDF conversion
-    - Avoids the __enter__ error on older Gradio versions by not using 'with progress:'
-    """
+    """PDF conversion function (shows a progress bar)"""
     progress(0, "Converting to PDF...")
     file_path = to_pdf(file_path)
     time.sleep(0.5)
@@ -177,23 +169,19 @@ def to_markdown(file_path, end_pages, is_ocr, layout_mode, formula_enable, table
         logger.error("Compression failed")
     time.sleep(0.5)
 
-    progress(70, "Loading markdown...")
+    progress(70, "Reading markdown...")
     md_path = os.path.join(local_md_dir, file_name + ".md")
     with open(md_path, 'r', encoding='utf-8') as f:
         txt_content = f.read()
     time.sleep(0.5)
 
-    progress(90, "Converting images (base64)...")
+    progress(90, "Converting images to base64...")
     md_content = replace_image_with_base64(txt_content, local_md_dir)
     time.sleep(0.5)
 
     progress(100, "Conversion complete!")
     return md_content
 
-
-###############################
-# magic_pdf model initialization
-###############################
 def init_model():
     from magic_pdf.model.doc_analyze_by_custom_model import ModelSingleton
     try:
@@ -223,10 +211,6 @@ other_lang = ['ch','en','korean','japan','chinese_cht','ta','te','ka']
 all_lang = ['', 'auto']
 all_lang.extend([*other_lang, *latin_lang, *arabic_lang, *cyrillic_lang, *devanagari_lang])
 
-
-#################################
-# 2) Gemini (google.generativeai)
-#################################
 import google.generativeai as genai
 from gradio import ChatMessage
 from typing import Iterator
@@ -234,13 +218,10 @@ import time
 
 GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
 genai.configure(api_key=GEMINI_API_KEY)
-
 model = genai.GenerativeModel("gemini-2.0-flash-thinking-exp-1219")
 
 def format_chat_history(messages: list) -> list:
-    """
-    A (role, content) format that Gemini can understand
-    """
+    """Convert to the (role, parts[]) format Gemini understands"""
     formatted_history = []
     for message in messages:
         if not (message.role == "assistant" and hasattr(message, "metadata")):
@@ -251,16 +232,12 @@ def format_chat_history(messages: list) -> list:
     return formatted_history
 
 def stream_gemini_response(user_message: str, messages: list) -> Iterator[list]:
-    """
-    Stream the Gemini response; if user_message is empty, substitute a default phrase
-    """
-    # If the string is empty, swap in a default phrase (error prevention)
+    """Stream the Gemini response (swap in a default phrase if user_message is blank)"""
     if not user_message.strip():
-        user_message = "…(No content from user)…"
+        user_message = "...(No content from user)..."
 
     try:
         print(f"\n=== [Gemini] New Request ===\nUser message: '{user_message}'")
-
         chat_history = format_chat_history(messages)
         chat = model.start_chat(history=chat_history)
         response = chat.send_message(user_message, stream=True)
@@ -269,7 +246,6 @@ def stream_gemini_response(user_message: str, messages: list) -> Iterator[list]:
     response_buffer = ""
     thinking_complete = False
 
-    # Add a "Thinking" message
     messages.append(
         ChatMessage(
             role="assistant",
@@ -284,7 +260,6 @@ def stream_gemini_response(user_message: str, messages: list) -> Iterator[list]:
             current_chunk = parts[0].text
 
             if len(parts) == 2 and not thinking_complete:
-                # Complete thought
                 thought_buffer += current_chunk
                 messages[-1] = ChatMessage(
                     role="assistant",
@@ -293,16 +268,13 @@ def stream_gemini_response(user_message: str, messages: list) -> Iterator[list]:
                 )
                 yield convert_chat_messages_to_gradio_format(messages)
 
-                # Start the final response
                 response_buffer = parts[1].text
                 messages.append(ChatMessage(role="assistant", content=response_buffer))
                 thinking_complete = True
             elif thinking_complete:
-                # Response ongoing
                 response_buffer += current_chunk
                 messages[-1] = ChatMessage(role="assistant", content=response_buffer)
             else:
-                # Still thinking
                 thought_buffer += current_chunk
                 messages[-1] = ChatMessage(
                     role="assistant",
@@ -320,38 +292,31 @@ def stream_gemini_response(user_message: str, messages: list) -> Iterator[list]:
     yield convert_chat_messages_to_gradio_format(messages)
 
 def convert_chat_messages_to_gradio_format(messages):
-    """
-    ChatMessage list -> [ (user utterance, bot reply), (...), ... ]
-    """
+    """ChatMessage list -> [ (user utterance, bot reply), (...), ... ]"""
     gradio_chat = []
     user_text, assistant_text = None, None
 
     for msg in messages:
         if msg.role == "user":
-            # Save the previous turn
             if user_text is not None or assistant_text is not None:
                 gradio_chat.append((user_text or "", assistant_text or ""))
             user_text = msg.content
             assistant_text = None
         else:
-            # assistant
             if user_text is None:
                 user_text = ""
             if assistant_text is None:
                 assistant_text = msg.content
             else:
-                assistant_text += msg.content  # accumulate while streaming
+                assistant_text += msg.content
 
-    # Last turn
     if user_text is not None or assistant_text is not None:
         gradio_chat.append((user_text or "", assistant_text or ""))
 
     return gradio_chat
 
 def user_message(msg: str, history: list, doc_text: str) -> tuple[str, list]:
-    """
-    Automatically insert a phrase referencing doc_text (markdown)
-    """
+    """Automatically reshape the question using doc_text (markdown)"""
     if doc_text.strip():
         user_query = f"Answer with reference to the following document:\n\n{doc_text}\n\nQuestion: {msg}"
     else:
@@ -360,49 +325,62 @@ def user_message(msg: str, history: list, doc_text: str) -> tuple[str, list]:
     history.append(ChatMessage(role="user", content=user_query))
     return "", history
 
-
-################################
-# 3) Assemble & run the integrated Gradio app
-################################
-with gr.Blocks(title="OCR FLEX + Gemini Chat", css=create_css()) as demo:
+def reset_states(_):
+    """
+    On a new file upload:
+      - chat_history -> empty list
+      - md_state     -> empty string
+      - chatbot      -> empty list of tuples
+    """
+    return [], "", []
+
+with gr.Blocks(title="VisionOCR", css=create_css()) as demo:
     gr.HTML("""
     <div class="title-area">
-        <h1>OCR FLEX + Gemini Chat</h1>
-        <p>PDF/image -> text (markdown) conversion, then chat with the Gemini LLM</p>
+        <h1>VisionOCR</h1>
+        <p>PDF/image -> text (markdown) conversion, then chat with a reasoning LLM</p>
     </div>
     """)
 
-    md_state = gr.State("")
-    chat_history = gr.State([])
+    md_state = gr.State("")        # converted markdown text
+    chat_history = gr.State([])    # list of ChatMessage
 
+    # Upload & convert
     with gr.Row():
         file = gr.File(label="Upload PDF/image", file_types=[".pdf", ".png", ".jpeg", ".jpg"], interactive=True)
         convert_btn = gr.Button("Convert")
 
-    # Hidden components
-    max_pages = gr.Slider(1, 20, 10, visible=False, elem_classes="invisible")
-    layout_mode = gr.Dropdown(["layoutlmv3","doclayout_yolo"], value="doclayout_yolo", visible=False, elem_classes="invisible")
+    # On a new file upload: reset the previous conversation/markdown/chatbot
+    chatbot = gr.Chatbot(height=600)  # the actual Chatbot component
+
+    file.change(
+        fn=reset_states,
+        inputs=file,
+        outputs=[chat_history, md_state, chatbot]  # <--- the chatbot component object, not the string "chatbot"
+    )
+
+    # Hidden components
+    max_pages = gr.Slider(1, 20, 10, visible=False, elem_classes="invisible")
+    layout_mode = gr.Dropdown(["layoutlmv3","doclayout_yolo"], value="doclayout_yolo", visible=False, elem_classes="invisible")
     language = gr.Dropdown(all_lang, value='auto', visible=False, elem_classes="invisible")
     formula_enable = gr.Checkbox(value=True, visible=False, elem_classes="invisible")
    is_ocr = gr.Checkbox(value=False, visible=False, elem_classes="invisible")
     table_enable = gr.Checkbox(value=True, visible=False, elem_classes="invisible")
 
-    # Convert click -> to_markdown (progress)
     convert_btn.click(
         fn=to_markdown,
         inputs=[file, max_pages, is_ocr, layout_mode, formula_enable, table_enable, language],
         outputs=md_state,
-        show_progress=True  # progress bar + loading indicator
+        show_progress=True  # show the progress bar
     )
 
     # Gemini Chat
-    gr.Markdown("## Gemini 2.0 Flash (Thinking) Chat")
-    chatbot = gr.Chatbot(height=600)
+    gr.Markdown("## Chat with a reasoning LLM")
+
     with gr.Row():
         chat_input = gr.Textbox(lines=1, placeholder="Enter your question...")
         clear_btn = gr.Button("Reset conversation")
 
-    # Send prompt -> user_message -> stream_gemini_response
     chat_input.submit(
         fn=user_message,
         inputs=[chat_input, chat_history, md_state],
@@ -413,19 +391,14 @@ with gr.Blocks(title="OCR FLEX + Gemini Chat", css=create_css()) as demo:
         outputs=chatbot
     )
 
-    def clear_states():
-        return [], ""
+    def clear_all():
+        return [], "", []
 
     clear_btn.click(
-        fn=clear_states,
+        fn=clear_all,
         inputs=[],
-        outputs=[chat_history, md_state]
-    ).then(
-        fn=lambda: [],
-        inputs=[],
-        outputs=chatbot
+        outputs=[chat_history, md_state, chatbot]
     )
 
-
 if __name__ == "__main__":
     demo.launch(server_name="0.0.0.0", server_port=7860, debug=True)
 
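The key fix in this commit is the reset wiring: `file.change` and `clear_btn.click` now return one value per output, and `outputs=[...]` lists the component objects themselves (including the `chatbot` component, not the string "chatbot"). A minimal standalone sketch of the pattern, with a stub handler in place of the app's real logic:

import gradio as gr

def reset_states(_file):
    # One return value per entry in `outputs` below:
    # chat_history (State) -> [], md_state (State) -> "", chatbot (Chatbot) -> []
    return [], "", []

with gr.Blocks() as demo:
    md_state = gr.State("")
    chat_history = gr.State([])
    file = gr.File(label="Upload PDF/image")
    chatbot = gr.Chatbot(height=300)

    # `outputs` takes component objects; the handler's return values
    # are assigned to them positionally.
    file.change(fn=reset_states, inputs=file,
                outputs=[chat_history, md_state, chatbot])

demo.launch()

Because a single handler can now clear both `gr.State` holders and the Chatbot in one event, the old `clear_states` + `.then(lambda: [])` chain is no longer needed.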
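For context on `format_chat_history`: chat history in `google.generativeai` is a list of `{"role": ..., "parts": [...]}` entries, and that API names its roles "user" and "model". A small sketch using the model name from the patch (the history text here is illustrative):

import os
import google.generativeai as genai

genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
model = genai.GenerativeModel("gemini-2.0-flash-thinking-exp-1219")

# Each history entry: {"role": "user" | "model", "parts": [text, ...]}
history = [
    {"role": "user", "parts": ["What is in the uploaded document?"]},
    {"role": "model", "parts": ["A short PDF about OCR pipelines."]},
]
chat = model.start_chat(history=history)
response = chat.send_message("Summarize it in one sentence.", stream=True)
for chunk in response:
    print(chunk.text, end="")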
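With `convert_chat_messages_to_gradio_format` from this file in scope, consecutive streamed assistant chunks fold into a single (user, assistant) pair, which is the tuple format `gr.Chatbot` renders:

from gradio import ChatMessage

messages = [
    ChatMessage(role="user", content="Hello"),
    ChatMessage(role="assistant", content="Hi! "),
    ChatMessage(role="assistant", content="How can I help?"),  # streamed continuation
]
print(convert_chat_messages_to_gradio_format(messages))
# -> [('Hello', 'Hi! How can I help?')]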