##############################
# 1) Existing PDF processing code
##############################
import base64
import json
import os
import time
import zipfile
from pathlib import Path
import re
import uuid

import pymupdf

# os.system() calls carried over from the original code
os.system('pip uninstall -y magic-pdf')
os.system('pip install git+https://github.com/opendatalab/MinerU.git@dev')
os.system('wget https://github.com/opendatalab/MinerU/raw/dev/scripts/download_models_hf.py -O download_models_hf.py')
os.system('python download_models_hf.py')
# Switch the MinerU config to CUDA and, if an API key is present, enable
# LLM-aided title detection. Assumes /home/user/magic-pdf.json was created by
# download_models_hf.py and already contains the 'llm-aided-config' section.
with open('/home/user/magic-pdf.json', 'r') as file:
    data = json.load(file)

data['device-mode'] = "cuda"
if os.getenv('apikey'):
    data['llm-aided-config']['title_aided']['api_key'] = os.getenv('apikey')
    data['llm-aided-config']['title_aided']['enable'] = True

with open('/home/user/magic-pdf.json', 'w') as file:
    json.dump(data, file, indent=4)

os.system('cp -r paddleocr /home/user/.paddleocr')
# from gradio_pdf import PDF  # PDF preview component, hidden for now
import gradio as gr
from loguru import logger

from magic_pdf.data.data_reader_writer import FileBasedDataReader
from magic_pdf.libs.hash_utils import compute_sha256
from magic_pdf.tools.common import do_parse, prepare_env
def create_css():
    return """
    /* Global styles */
    .gradio-container {
        background: linear-gradient(135deg, #EFF6FF 0%, #F5F3FF 100%);
        max-width: 1200px !important;
        margin: 0 auto !important;
        padding: 2rem !important;
    }
    /* Title styles */
    .title-area {
        text-align: center;
        margin-bottom: 2rem;
        padding: 1rem;
        background: white;
        border-radius: 1rem;
        box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1);
    }
    .title-area h1 {
        background: linear-gradient(90deg, #2563EB 0%, #7C3AED 100%);
        -webkit-background-clip: text;
        -webkit-text-fill-color: transparent;
        font-size: 2.5rem;
        font-weight: bold;
        margin-bottom: 0.5rem;
    }
    .title-area p {
        color: #6B7280;
        font-size: 1.1rem;
    }
    /* Styles for components that stay hidden */
    .invisible {
        display: none !important;
    }
    """
def read_fn(path):
    disk_rw = FileBasedDataReader(os.path.dirname(path))
    return disk_rw.read(os.path.basename(path))
def parse_pdf(doc_path, output_dir, end_page_id, is_ocr, layout_mode, formula_enable, table_enable, language):
    os.makedirs(output_dir, exist_ok=True)
    try:
        file_name = f"{str(Path(doc_path).stem)}_{time.time()}"
        pdf_data = read_fn(doc_path)
        if is_ocr:
            parse_method = "ocr"
        else:
            parse_method = "auto"
        local_image_dir, local_md_dir = prepare_env(output_dir, file_name, parse_method)
        do_parse(
            output_dir,
            file_name,
            pdf_data,
            [],
            parse_method,
            False,
            end_page_id=end_page_id,
            layout_model=layout_mode,
            formula_enable=formula_enable,
            table_enable=table_enable,
            lang=language,
            f_dump_orig_pdf=False,
        )
        return local_md_dir, file_name
    except Exception as e:
        logger.exception(e)
        raise  # re-raise so callers don't silently unpack None
def compress_directory_to_zip(directory_path, output_zip_path):
    try:
        with zipfile.ZipFile(output_zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for root, dirs, files in os.walk(directory_path):
                for file in files:
                    file_path = os.path.join(root, file)
                    arcname = os.path.relpath(file_path, directory_path)
                    zipf.write(file_path, arcname)
        return 0
    except Exception as e:
        logger.exception(e)
        return -1
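# Returns 0 on success and -1 on failure (logged via loguru), mirroring a
# shell-style exit code instead of raising.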
def image_to_base64(image_path):
    with open(image_path, "rb") as image_file:
        return base64.b64encode(image_file.read()).decode('utf-8')
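# Example (hypothetical file): image_to_base64("images/fig1.png") returns the
# raw base64 payload with no "data:" prefix, e.g. "iVBORw0KGgo..." for a PNG.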
def replace_image_with_base64(markdown_text, image_dir_path):
    # Matches markdown image syntax: ![alt](relative/path.png)
    pattern = r'\!\[(?:[^\]]*)\]\(([^)]+)\)'
    def replace(match):
        relative_path = match.group(1)
        full_path = os.path.join(image_dir_path, relative_path)
        base64_image = image_to_base64(full_path)
        return f"![{relative_path}](data:image/jpeg;base64,{base64_image})"
    return re.sub(pattern, replace, markdown_text)
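# Example (hypothetical paths): with image_dir_path="./output/doc/auto", the
# link "![](images/0001.jpg)" becomes
# "![images/0001.jpg](data:image/jpeg;base64,/9j/4AAQ...)", so the returned
# markdown renders without any image files on disk.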
def to_pdf(file_path):
    """
    Convert non-PDF inputs (e.g. PNG, JPG files) to PDF using pymupdf.
    """
    with pymupdf.open(file_path) as f:
        if f.is_pdf:
            return file_path
        else:
            pdf_bytes = f.convert_to_pdf()
            unique_filename = f"{uuid.uuid4()}.pdf"
            tmp_file_path = os.path.join(os.path.dirname(file_path), unique_filename)
            with open(tmp_file_path, 'wb') as tmp_pdf_file:
                tmp_pdf_file.write(pdf_bytes)
            return tmp_file_path
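# Example (hypothetical files): to_pdf("/tmp/scan.png") writes "/tmp/<uuid>.pdf"
# next to the input and returns that path; to_pdf("/tmp/report.pdf") returns
# the original path unchanged.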
def to_markdown(file_path, end_pages, is_ocr, layout_mode, formula_enable, table_enable, language):
    """
    Take a file, extract markdown for up to end_pages pages, and return
    md_content with images inlined as base64.
    """
    file_path = to_pdf(file_path)
    # Hard cap at 20 pages to bound processing time
    if end_pages > 20:
        end_pages = 20
    local_md_dir, file_name = parse_pdf(file_path, './output', end_pages - 1, is_ocr,
                                        layout_mode, formula_enable, table_enable, language)
    archive_zip_path = os.path.join("./output", compute_sha256(local_md_dir) + ".zip")
    zip_archive_success = compress_directory_to_zip(local_md_dir, archive_zip_path)
    if zip_archive_success == 0:
        logger.info("Compression succeeded")
    else:
        logger.error("Compression failed")
    md_path = os.path.join(local_md_dir, file_name + ".md")
    with open(md_path, 'r', encoding='utf-8') as f:
        txt_content = f.read()
    md_content = replace_image_with_base64(txt_content, local_md_dir)
    # new_pdf_path = os.path.join(local_md_dir, file_name + "_layout.pdf")  # for the original PDF preview
    return md_content  # return only the final markdown text with base64 images inlined
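# Example (hypothetical file): to_markdown("paper.pdf", 10, False,
# "doclayout_yolo", True, True, "auto") parses the first 10 pages and returns
# self-contained markdown; a zip of the full output also lands under ./output.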
latex_delimiters = [
    {"left": "$$", "right": "$$", "display": True},
    {"left": '$', "right": '$', "display": False}
]
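# These delimiter dicts follow the format gr.Markdown/gr.Chatbot accept for
# their latex_delimiters parameter; kept for the commented-out markdown preview.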
def init_model():
    """
    Pre-initialize the magic_pdf models.
    """
    from magic_pdf.model.doc_analyze_by_custom_model import ModelSingleton
    try:
        model_manager = ModelSingleton()
        txt_model = model_manager.get_model(False, False)
        logger.info("txt_model init final")
        ocr_model = model_manager.get_model(True, False)
        logger.info("ocr_model init final")
        return 0
    except Exception as e:
        logger.exception(e)
        return -1
model_init = init_model()
logger.info(f"model_init: {model_init}")
latin_lang = [
    'af', 'az', 'bs', 'cs', 'cy', 'da', 'de', 'es', 'et', 'fr', 'ga', 'hr',
    'hu', 'id', 'is', 'it', 'ku', 'la', 'lt', 'lv', 'mi', 'ms', 'mt', 'nl',
    'no', 'oc', 'pi', 'pl', 'pt', 'ro', 'rs_latin', 'sk', 'sl', 'sq', 'sv',
    'sw', 'tl', 'tr', 'uz', 'vi', 'french', 'german'
]
arabic_lang = ['ar', 'fa', 'ug', 'ur']
cyrillic_lang = [
    'ru', 'rs_cyrillic', 'be', 'bg', 'uk', 'mn', 'abq', 'ady', 'kbd', 'ava',
    'dar', 'inh', 'che', 'lbe', 'lez', 'tab'
]
devanagari_lang = [
    'hi', 'mr', 'ne', 'bh', 'mai', 'ang', 'bho', 'mah', 'sck', 'new', 'gom',
    'sa', 'bgc'
]
other_lang = ['ch', 'en', 'korean', 'japan', 'chinese_cht', 'ta', 'te', 'ka']

all_lang = ['', 'auto']
all_lang.extend([*other_lang, *latin_lang, *arabic_lang, *cyrillic_lang, *devanagari_lang])
##############################
# 2) Gemini LLM app code
##############################
# (Duplicate imports; kept as comments so nothing from the original is dropped)
# import os
# import gradio as gr
from gradio import ChatMessage
from typing import Iterator
import google.generativeai as genai
# import time  (already imported above)

# Get the Gemini API key from the environment variable
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
genai.configure(api_key=GEMINI_API_KEY)

# We will be using the Gemini 2.0 Flash model with Thinking capabilities
model = genai.GenerativeModel("gemini-2.0-flash-thinking-exp-1219")
def format_chat_history(messages: list) -> list:
    """
    Formats the chat history into a structure Gemini can understand
    """
    formatted_history = []
    for message in messages:
        # chat_history is a gr.State here, so entries stay ChatMessage
        # dataclasses; normalize them to dicts before calling .get()
        if isinstance(message, ChatMessage):
            message = {"role": message.role, "content": message.content,
                       "metadata": message.metadata}
        # Skip thinking messages (assistant messages carrying metadata)
        if not (message.get("role") == "assistant" and message.get("metadata")):
            formatted_history.append({
                # The Gemini API expects the roles "user" and "model"
                "role": "user" if message.get("role") == "user" else "model",
                "parts": [message.get("content", "")]
            })
    return formatted_history
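# Example: [ChatMessage(role="user", content="hi"),
#           ChatMessage(role="assistant", content="...", metadata={"title": "Thinking"}),
#           ChatMessage(role="assistant", content="hello")]
# formats to [{"role": "user", "parts": ["hi"]},
#             {"role": "model", "parts": ["hello"]}] -- the thinking turn is dropped.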
def stream_gemini_response(user_message: str, messages: list) -> Iterator[list]:
    """
    Streams thoughts and response with conversation history support for text input only.
    """
    if not user_message.strip():
        messages.append(ChatMessage(role="assistant", content="Please provide a non-empty text message. Empty input is not allowed."))
        yield messages
        return
    try:
        print("\n=== New Request (Text) ===")
        print(f"User message: {user_message}")
        # The caller has already appended the latest user message to `messages`;
        # exclude it from the history so send_message() doesn't deliver it twice.
        chat_history = format_chat_history(messages[:-1])
        chat = model.start_chat(history=chat_history)
        response = chat.send_message(user_message, stream=True)
        thought_buffer = ""
        response_buffer = ""
        thinking_complete = False
        # Placeholder entry that the streamed "thinking" content fills in
        messages.append(
            ChatMessage(
                role="assistant",
                content="",
                metadata={"title": "⚙️ Thinking: *The thoughts produced by the model are experimental"}
            )
        )
        for chunk in response:
            parts = chunk.candidates[0].content.parts
            current_chunk = parts[0].text
            if len(parts) == 2 and not thinking_complete:
                # Complete thought and start response
                thought_buffer += current_chunk
                print(f"\n=== Complete Thought ===\n{thought_buffer}")
                messages[-1] = ChatMessage(
                    role="assistant",
                    content=thought_buffer,
                    metadata={"title": "⚙️ Thinking: *The thoughts produced by the model are experimental"}
                )
                yield messages
                # Start response
                response_buffer = parts[1].text
                print(f"\n=== Starting Response ===\n{response_buffer}")
                messages.append(
                    ChatMessage(
                        role="assistant",
                        content=response_buffer
                    )
                )
                thinking_complete = True
            elif thinking_complete:
                response_buffer += current_chunk
                print(f"\n=== Response Chunk ===\n{current_chunk}")
                messages[-1] = ChatMessage(
                    role="assistant",
                    content=response_buffer
                )
            else:
                thought_buffer += current_chunk
                print(f"\n=== Thinking Chunk ===\n{current_chunk}")
                messages[-1] = ChatMessage(
                    role="assistant",
                    content=thought_buffer,
                    metadata={"title": "⚙️ Thinking: *The thoughts produced by the model are experimental"}
                )
            # time.sleep(0.05)  # Optional debugging delay
            yield messages
        print(f"\n=== Final Response ===\n{response_buffer}")
    except Exception as e:
        print(f"\n=== Error ===\n{str(e)}")
        messages.append(
            ChatMessage(
                role="assistant",
                content=f"I apologize, but I encountered an error: {str(e)}"
            )
        )
        yield messages
def user_message(msg: str, history: list) -> tuple[str, list]:
    """Adds user message to chat history"""
    history.append(ChatMessage(role="user", content=msg))
    return "", history
######################################################
# 3) Integrated Gradio app
#    - Only the PDF upload is visible (the rest is hidden)
#    - After upload, clicking "Convert" builds the markdown and
#      hands it to the Chatbot for conversation
######################################################
with gr.Blocks(title="Integrated OCR & Gemini Chat", css=create_css(), theme=gr.themes.Soft(primary_hue="teal", secondary_hue="slate", neutral_hue="neutral")) as demo:
    gr.HTML("""
    <div class="title-area">
        <h1>OCR FLEX + Gemini Chat</h1>
        <p>Convert PDFs/images to text (markdown), then chat with the Gemini LLM</p>
    </div>
    """)

    # Internal state (markdown text) and chat state
    md_state = gr.State("")
    chat_history = gr.State([])  # Gemini chat history
    msg_store = gr.State("")     # holds the pending query; clearing the textbox would otherwise lose it
    # 1) File upload UI
    with gr.Row():
        file = gr.File(
            label="Upload a PDF or image file",
            file_types=[".pdf", ".png", ".jpeg", ".jpg"],
            interactive=True
        )
        convert_btn = gr.Button(
            "Convert",
            elem_classes="primary-button"
        )
    # 2) The sliders, checkboxes, etc. from the original app, all hidden
    max_pages = gr.Slider(
        1, 20, 10,
        step=1,
        label='Max pages to convert',
        elem_classes="invisible",
        visible=False
    )
    layout_mode = gr.Dropdown(
        ["layoutlmv3", "doclayout_yolo"],
        label="Layout model",
        value="doclayout_yolo",
        elem_classes="invisible",
        visible=False
    )
    language = gr.Dropdown(
        all_lang,
        label="Language",
        value='auto',
        elem_classes="invisible",
        visible=False
    )
    formula_enable = gr.Checkbox(
        label="Enable formula recognition",
        value=True,
        elem_classes="invisible",
        visible=False
    )
    is_ocr = gr.Checkbox(
        label="Force OCR",
        value=False,
        elem_classes="invisible",
        visible=False
    )
    table_enable = gr.Checkbox(
        label="Enable table recognition (test)",
        value=True,
        elem_classes="invisible",
        visible=False
    )
    # 3) Output widgets (file, markdown, etc.) are hidden too;
    #    uncomment to inspect them if needed
    # output_file = gr.File(
    #     label="Conversion result",
    #     interactive=False,
    #     visible=False
    # )
    # md = gr.Markdown(
    #     label="Markdown rendering",
    #     visible=False
    # )
    # md_text = gr.TextArea(
    #     lines=45,
    #     visible=False
    # )
    # pdf_show = PDF(
    #     label='PDF preview',
    #     interactive=False,
    #     visible=False,
    #     height=800
    # )
    # 4) On file upload -> "Convert" click:
    #    run to_markdown and store the markdown in md_state
    convert_btn.click(
        fn=to_markdown,
        inputs=[file, max_pages, is_ocr, layout_mode, formula_enable, table_enable, language],
        outputs=md_state
    )
    # ==========================
    # Gemini Chat section
    # ==========================
    gr.Markdown("## Gemini 2.0 Flash (With Thinking) Chat")
    chatbot = gr.Chatbot(
        label="Gemini2.0 Chatbot (Streaming Output)",
        type="messages",  # required so ChatMessage entries render correctly
        render_markdown=True,
        height=400
    )
    with gr.Row():
        chat_input = gr.Textbox(
            lines=1,
            label="Your question",
            placeholder="Ask about the extracted document (markdown content)..."
        )
        clear_button = gr.Button("Clear conversation")
    # User asks -> user_message_wrapper -> Gemini processing -> stream_gemini_response
    def user_message_wrapper(msg, history, doc_text):
        """
        On each user input, lightly rewrite the question so it references
        doc_text (the markdown) before adding it to history (simple example).
        """
        if not doc_text:
            # No converted document yet, so ask the question as-is
            user_query = msg
        else:
            # Simple example prompt asking the model to "consult" the document content
            user_query = f"Answer with reference to the following document:\n\n{doc_text}\n\nQuestion: {msg}"
        history.append(ChatMessage(role="user", content=user_query))
        # Also return the query itself: clearing chat_input below would otherwise
        # hand stream_gemini_response an empty message
        return "", history, user_query
    def stream_and_mirror(msg, history):
        # Mirror each streamed update into both the history state and the
        # chatbot; otherwise the chatbot would only refresh after streaming ends
        for updated in stream_gemini_response(msg, history):
            yield updated, updated

    chat_input.submit(
        fn=user_message_wrapper,
        inputs=[chat_input, chat_history, md_state],
        outputs=[chat_input, chat_history, msg_store]
    ).then(
        fn=stream_and_mirror,
        inputs=[msg_store, chat_history],
        outputs=[chat_history, chatbot]
    ).then(
        fn=lambda h: h,
        inputs=chat_history,
        outputs=chatbot
    )
    clear_button.click(
        fn=lambda: ([], ""),
        inputs=[],
        outputs=[chat_history, md_state]
    ).then(
        fn=lambda: [],
        inputs=[],
        outputs=chatbot
    )
##############################
# 4) Run the app
##############################
if __name__ == "__main__":
    # First demo.launch() - runs the integrated app
    demo.launch(ssr_mode=True, debug=True)

###############################################
# Below is the separate demo.launch() from the
# original "Gemini app code" (preserved as comments)
###############################################
# if __name__ == "__main__":
#     demo.launch(debug=True)