# openfree's picture
# Update app.py
# 275364c verified
# raw
# history blame
# 18.3 kB
import base64
import json
import os
import time
import zipfile
from pathlib import Path
import re
import uuid
import pymupdf
###############################
# ํ™˜๊ฒฝ ์„ค์ •
###############################
os.system('pip uninstall -y magic-pdf')
os.system('pip install git+https://github.com/opendatalab/MinerU.git@dev')
os.system('wget https://github.com/opendatalab/MinerU/raw/dev/scripts/download_models_hf.py -O download_models_hf.py')
os.system('python download_models_hf.py')
with open('/home/user/magic-pdf.json', 'r') as file:
data = json.load(file)
data['device-mode'] = "cuda"
if os.getenv('apikey'):
data['llm-aided-config']['title_aided']['api_key'] = os.getenv('apikey')
data['llm-aided-config']['title_aided']['enable'] = True
with open('/home/user/magic-pdf.json', 'w') as file:
json.dump(data, file, indent=4)
os.system('cp -r paddleocr /home/user/.paddleocr')
###############################
# ๊ทธ ์™ธ ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ
###############################
import gradio as gr
from loguru import logger
from gradio_pdf import PDF
###############################
# magic_pdf ๊ด€๋ จ ๋ชจ๋“ˆ
###############################
from magic_pdf.data.data_reader_writer import FileBasedDataReader
from magic_pdf.libs.hash_utils import compute_sha256
from magic_pdf.tools.common import do_parse, prepare_env
###############################
# ๊ณตํ†ต ํ•จ์ˆ˜๋“ค
###############################
def create_css():
    """Return the base CSS applied to the whole gradio Blocks layout."""
    stylesheet = """
    .gradio-container {
        width: 100vw !important;
        min-height: 100vh !important;
        margin: 0 !important;
        padding: 0 !important;
        background: linear-gradient(135deg, #EFF6FF 0%, #F5F3FF 100%);
        display: flex;
        flex-direction: column;
        overflow-y: auto !important;
    }
    .title-area {
        text-align: center;
        margin: 1rem auto;
        padding: 1rem;
        background: white;
        border-radius: 1rem;
        box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1);
        max-width: 800px;
    }
    .title-area h1 {
        background: linear-gradient(90deg, #2563EB 0%, #7C3AED 100%);
        -webkit-background-clip: text;
        -webkit-text-fill-color: transparent;
        font-size: 2.5rem;
        font-weight: bold;
        margin-bottom: 0.5rem;
    }
    .title-area p {
        color: #6B7280;
        font-size: 1.1rem;
    }
    .gr-block, .gr-box {
        padding: 0.5rem !important;
    }
    """
    return stylesheet
def read_fn(path):
    """Read and return the raw bytes of *path* via a FileBasedDataReader
    rooted at the file's parent directory."""
    directory, name = os.path.split(path)
    reader = FileBasedDataReader(directory)
    return reader.read(name)
def parse_pdf(doc_path, output_dir, end_page_id, is_ocr, layout_mode, formula_enable, table_enable, language):
    """
    Run the magic-pdf do_parse pipeline on a single document.

    Args:
        doc_path: Path of the source PDF on disk.
        output_dir: Root directory for parser output.
        end_page_id: Index of the last page to process (0-based, inclusive).
        is_ocr: Force OCR parsing instead of automatic mode detection.
        layout_mode: Layout model name (e.g. "doclayout_yolo").
        formula_enable: Enable formula recognition.
        table_enable: Enable table recognition.
        language: OCR language hint.

    Returns:
        (local_md_dir, file_name): directory containing the generated
        markdown/images, and the stem used for the output files.

    Raises:
        Exception: pipeline failures are logged and re-raised. (Bug fix:
        previously the exception was swallowed and the function implicitly
        returned None, so the caller's tuple-unpack crashed with a
        misleading TypeError instead of the real error.)
    """
    os.makedirs(output_dir, exist_ok=True)
    try:
        # Timestamped stem avoids collisions between repeated conversions
        # of the same file.
        file_name = f"{Path(doc_path).stem}_{time.time()}"
        pdf_data = read_fn(doc_path)
        parse_method = "ocr" if is_ocr else "auto"
        local_image_dir, local_md_dir = prepare_env(output_dir, file_name, parse_method)
        do_parse(
            output_dir,
            file_name,
            pdf_data,
            [],
            parse_method,
            False,
            end_page_id=end_page_id,
            layout_model=layout_mode,
            formula_enable=formula_enable,
            table_enable=table_enable,
            lang=language,
            f_dump_orig_pdf=False,
        )
        return local_md_dir, file_name
    except Exception as e:
        logger.exception(e)
        raise
def compress_directory_to_zip(directory_path, output_zip_path):
    """
    Zip the whole directory tree rooted at directory_path into output_zip_path.

    Archive member names are stored relative to directory_path.

    Returns:
        0 on success, -1 on any failure (the error is logged, not raised).
    """
    try:
        with zipfile.ZipFile(output_zip_path, 'w', zipfile.ZIP_DEFLATED) as archive:
            for current_root, _subdirs, filenames in os.walk(directory_path):
                for name in filenames:
                    absolute_path = os.path.join(current_root, name)
                    relative_name = os.path.relpath(absolute_path, directory_path)
                    archive.write(absolute_path, relative_name)
    except Exception as exc:
        logger.exception(exc)
        return -1
    return 0
def image_to_base64(image_path):
    """Return the file at image_path encoded as a base64 ASCII string."""
    raw_bytes = Path(image_path).read_bytes()
    return base64.b64encode(raw_bytes).decode('utf-8')
def replace_image_with_base64(markdown_text, image_dir_path):
    """
    Inline every local markdown image reference as a base64 data URI.

    Image paths are resolved relative to image_dir_path. References that are
    already data URIs, that point to remote URLs, or that cannot be read are
    left untouched. (Bug fix: previously a missing file raised
    FileNotFoundError from image_to_base64 and aborted the whole conversion,
    and remote/data URIs were mangled by os.path.join.)

    NOTE(review): the data URI always declares image/jpeg regardless of the
    actual file type; browsers generally sniff the real format.
    """
    pattern = r'\!\[(?:[^\]]*)\]\(([^)]+)\)'

    def replace(match):
        relative_path = match.group(1)
        # Leave non-local references (remote URLs, pre-inlined data URIs) as-is.
        if relative_path.startswith(('http://', 'https://', 'data:')):
            return match.group(0)
        full_path = os.path.join(image_dir_path, relative_path)
        try:
            base64_image = image_to_base64(full_path)
        except OSError as exc:
            logger.warning(f"Could not inline image {full_path}: {exc}")
            return match.group(0)
        return f"![{relative_path}](data:image/jpeg;base64,{base64_image})"

    return re.sub(pattern, replace, markdown_text)
def to_pdf(file_path):
    """
    Ensure file_path refers to something the PDF parser can consume.

    TXT/CSV files are returned unchanged (no conversion needed); files that
    are already PDFs are returned as-is; any other format pymupdf can open
    (JPG/PNG, ...) is converted to PDF and written next to the source file
    under a random name, whose path is returned.
    """
    suffix = Path(file_path).suffix.lower()
    if suffix in ('.txt', '.csv'):
        return file_path
    with pymupdf.open(file_path) as document:
        if document.is_pdf:
            return file_path
        converted_bytes = document.convert_to_pdf()
    # Random name avoids clobbering anything in the upload directory.
    target_path = os.path.join(os.path.dirname(file_path), f"{uuid.uuid4()}.pdf")
    with open(target_path, 'wb') as handle:
        handle.write(converted_bytes)
    return target_path
def to_markdown(file_path, end_pages, is_ocr, layout_mode, formula_enable, table_enable, language, progress=gr.Progress(track_tqdm=False)):
    """
    Convert an uploaded PDF/image or TXT/CSV file into markdown text,
    driving the gradio progress bar along the way.

    TXT/CSV content is returned verbatim inside a fenced code block; anything
    else is converted to PDF if needed, parsed with magic-pdf, zipped, and
    returned as markdown with all images inlined as base64.

    Args:
        file_path: Path to the uploaded file.
        end_pages: Number of pages to convert (capped at 20 below).
        is_ocr: Force OCR parsing.
        layout_mode: Layout model name for magic-pdf.
        formula_enable: Enable formula recognition.
        table_enable: Enable table recognition.
        language: OCR language hint.
        progress: Gradio progress tracker.
            NOTE(review): called with values 0-100, while gr.Progress
            conventionally expects fractions in [0, 1] — confirm against the
            installed gradio version.

    Returns:
        Markdown string plus a status footer describing the zip result.
    """
    ext = Path(file_path).suffix.lower()
    if ext in ['.txt', '.csv']:
        progress(0, "ํŒŒ์ผ ์ฝ๋Š” ์ค‘...")
        with open(file_path, 'r', encoding='utf-8') as f:
            txt_content = f.read()
        time.sleep(0.5)  # presumably to keep progress updates visible — confirm
        progress(50, "ํŒŒ์ผ ๋‚ด์šฉ ์ฒ˜๋ฆฌ ์ค‘...")
        progress(100, "๋ณ€ํ™˜ ์™„๋ฃŒ!")
        return f"```{txt_content}```\n\n**๋ณ€ํ™˜ ์™„๋ฃŒ (ํ…์ŠคํŠธ/CSV ํŒŒ์ผ)**"
    else:
        progress(0, "PDF๋กœ ๋ณ€ํ™˜ ์ค‘...")
        file_path = to_pdf(file_path)
        time.sleep(0.5)
        # Hard cap: never parse more than 20 pages.
        if end_pages > 20:
            end_pages = 20
        progress(20, "๋ฌธ์„œ ํŒŒ์‹ฑ ์ค‘...")
        local_md_dir, file_name = parse_pdf(file_path, './output', end_pages - 1, is_ocr,
                                            layout_mode, formula_enable, table_enable, language)
        time.sleep(0.5)
        progress(50, "์••์ถ•(zip) ์ƒ์„ฑ ์ค‘...")
        # Zip name is derived from a hash of the output directory path.
        archive_zip_path = os.path.join("./output", compute_sha256(local_md_dir) + ".zip")
        zip_archive_success = compress_directory_to_zip(local_md_dir, archive_zip_path)
        if zip_archive_success == 0:
            logger.info("์••์ถ• ์„ฑ๊ณต")
            status_message = "\n\n**๋ณ€ํ™˜ ์™„๋ฃŒ (์••์ถ• ์„ฑ๊ณต)**"
        else:
            logger.error("์••์ถ• ์‹คํŒจ")
            status_message = "\n\n**๋ณ€ํ™˜ ์™„๋ฃŒ (์••์ถ• ์‹คํŒจ)**"
        time.sleep(0.5)
        progress(70, "๋งˆํฌ๋‹ค์šด ์ฝ๋Š” ์ค‘...")
        md_path = os.path.join(local_md_dir, file_name + ".md")
        with open(md_path, 'r', encoding='utf-8') as f:
            txt_content = f.read()
        time.sleep(0.5)
        progress(90, "์ด๋ฏธ์ง€ base64 ๋ณ€ํ™˜ ์ค‘...")
        md_content = replace_image_with_base64(txt_content, local_md_dir)
        time.sleep(0.5)
        progress(100, "๋ณ€ํ™˜ ์™„๋ฃŒ!")
        return md_content + status_message
def to_markdown_comparison(file_a, file_b, end_pages, is_ocr, layout_mode, formula_enable, table_enable, language, progress=gr.Progress(track_tqdm=False)):
    """
    Build a combined A/B comparison markdown from up to two uploaded files.

    Each converted file appears under its own "๋ฌธ์„œ A" / "๋ฌธ์„œ B" header; when
    both files are present, a comparison-analysis instruction is appended so
    the downstream LLM compares them.
    """
    sections = []
    for header, upload in (("### ๋ฌธ์„œ A\n", file_a), ("### ๋ฌธ์„œ B\n", file_b)):
        if upload is not None:
            converted = to_markdown(upload, end_pages, is_ocr, layout_mode,
                                    formula_enable, table_enable, language,
                                    progress=progress)
            sections.append(header + converted + "\n")
    if file_a is not None and file_b is not None:
        sections.append("### ๋น„๊ต ๋ถ„์„:\n๋‘ ๋ฌธ์„œ์˜ ์ฐจ์ด์ , ์žฅ๋‹จ์  ๋ฐ ์ฃผ์š” ๋‚ด์šฉ์„ ๋น„๊ต ๋ถ„์„ํ•˜์‹ญ์‹œ์˜ค.\n")
    return "".join(sections)
def init_model():
    """
    Warm up both magic-pdf model variants (plain text and OCR).

    Returns:
        0 when both models loaded successfully, -1 on any failure
        (the error is logged, not raised).
    """
    from magic_pdf.model.doc_analyze_by_custom_model import ModelSingleton
    try:
        manager = ModelSingleton()
        manager.get_model(False, False)
        logger.info("txt_model init final")
        manager.get_model(True, False)
        logger.info("ocr_model init final")
    except Exception as exc:
        logger.exception(exc)
        return -1
    return 0
# Eagerly warm up the models at import time; 0 = success, -1 = failure.
model_init = init_model()
logger.info(f"model_init: {model_init}")
###############################
# Language list
###############################
# OCR language identifiers grouped by script family; `all_lang` is the
# flattened list offered in the (hidden) language dropdown, with '' and
# 'auto' prepended.
latin_lang = [
    'af','az','bs','cs','cy','da','de','es','et','fr','ga','hr','hu','id','is','it','ku',
    'la','lt','lv','mi','ms','mt','nl','no','oc','pi','pl','pt','ro','rs_latin','sk','sl',
    'sq','sv','sw','tl','tr','uz','vi','french','german'
]
arabic_lang = ['ar','fa','ug','ur']
cyrillic_lang = ['ru','rs_cyrillic','be','bg','uk','mn','abq','ady','kbd','ava','dar','inh','che','lbe','lez','tab']
devanagari_lang = ['hi','mr','ne','bh','mai','ang','bho','mah','sck','new','gom','sa','bgc']
other_lang = ['ch','en','korean','japan','chinese_cht','ta','te','ka']
all_lang = ['', 'auto']
all_lang.extend([*other_lang, *latin_lang, *arabic_lang, *cyrillic_lang, *devanagari_lang])
###############################
# (1) PDF Chat ์šฉ LLM ๊ด€๋ จ
###############################
import google.generativeai as genai
from gradio import ChatMessage
from typing import Iterator
# Configure Gemini from the GEMINI_API_KEY environment variable.
# NOTE(review): if the variable is unset, api_key is None and requests fail
# at call time rather than here — confirm that is the desired behavior.
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
genai.configure(api_key=GEMINI_API_KEY)
model = genai.GenerativeModel("gemini-2.0-flash-thinking-exp-1219")
def format_chat_history(messages: list) -> list:
    """
    Convert ChatMessage history into Gemini's {role, parts[]} format.

    Assistant "thinking" entries (those carrying truthy metadata, e.g. the
    "โš™๏ธ Thinking" blocks added by stream_gemini_response) are excluded from
    the history sent to the model.

    Bug fix: the original used hasattr(message, "metadata"), but gradio's
    ChatMessage always defines a metadata attribute, so EVERY assistant
    message was dropped from the history. Only messages whose metadata is
    actually set (truthy) are skipped now.
    """
    formatted_history = []
    for message in messages:
        is_thinking = message.role == "assistant" and getattr(message, "metadata", None)
        if not is_thinking:
            formatted_history.append({
                "role": "user" if message.role == "user" else "assistant",
                "parts": [message.content],
            })
    return formatted_history
def convert_chat_messages_to_gradio_format(messages):
    """
    Flatten a ChatMessage list into gradio chatbot tuples:
    [(user_utterance, bot_reply), ...].

    Consecutive assistant messages are concatenated into a single reply;
    an assistant message with no preceding user turn gets an empty user slot.
    """
    pairs = []
    pending_user = None
    pending_bot = None

    def flush():
        # Emit the accumulated (user, bot) pair, if any, and reset.
        nonlocal pending_user, pending_bot
        if pending_user is not None or pending_bot is not None:
            pairs.append((pending_user or "", pending_bot or ""))
        pending_user = None
        pending_bot = None

    for message in messages:
        if message.role == "user":
            flush()
            pending_user = message.content
        else:
            if pending_user is None:
                pending_user = ""
            pending_bot = message.content if pending_bot is None else pending_bot + message.content
    flush()
    return pairs
def stream_gemini_response(user_message: str, messages: list) -> Iterator[list]:
    """
    Stream a Gemini response into the chat, yielding gradio-format history
    after each update. If user_message is blank it is replaced with a
    placeholder so the API call still succeeds.

    The model's "thinking" output is shown as an assistant message tagged
    with a metadata title; the final answer is a separate plain assistant
    message. `messages` is mutated in place; each yield re-renders it via
    convert_chat_messages_to_gradio_format.
    """
    if not user_message.strip():
        user_message = "...(No content from user)..."
    try:
        print(f"\n=== [Gemini] New Request ===\nUser message: '{user_message}'")
        chat_history = format_chat_history(messages)
        chat = model.start_chat(history=chat_history)
        response = chat.send_message(user_message, stream=True)
        thought_buffer = ""
        response_buffer = ""
        thinking_complete = False
        # Add a "Thinking" placeholder message, updated in place below.
        messages.append(
            ChatMessage(
                role="assistant",
                content="",
                metadata={"title": "โš™๏ธ Thinking: *The thoughts produced by the model are experimental"}
            )
        )
        yield convert_chat_messages_to_gradio_format(messages)
        for chunk in response:
            # NOTE(review): assumes the first candidate's parts hold the text;
            # two parts in one chunk means thought + start of the answer.
            parts = chunk.candidates[0].content.parts
            current_chunk = parts[0].text
            if len(parts) == 2 and not thinking_complete:
                # Thought finished and the answer begins in the same chunk.
                thought_buffer += current_chunk
                messages[-1] = ChatMessage(
                    role="assistant",
                    content=thought_buffer,
                    metadata={"title": "โš™๏ธ Thinking: *The thoughts produced by the model are experimental"}
                )
                yield convert_chat_messages_to_gradio_format(messages)
                response_buffer = parts[1].text
                messages.append(ChatMessage(role="assistant", content=response_buffer))
                thinking_complete = True
            elif thinking_complete:
                # Thinking already done: keep growing the visible answer.
                response_buffer += current_chunk
                messages[-1] = ChatMessage(role="assistant", content=response_buffer)
            else:
                # Still in the thinking phase: grow the thought message.
                thought_buffer += current_chunk
                messages[-1] = ChatMessage(
                    role="assistant",
                    content=thought_buffer,
                    metadata={"title": "โš™๏ธ Thinking: *The thoughts produced by the model are experimental"}
                )
            # Re-render once per chunk.
            yield convert_chat_messages_to_gradio_format(messages)
        print(f"\n=== [Gemini] Final Response ===\n{response_buffer}")
    except Exception as e:
        # Surface the error to the user instead of failing silently.
        print(f"\n=== [Gemini] Error ===\n{str(e)}")
        messages.append(ChatMessage(role="assistant", content=f"I encountered an error: {str(e)}"))
        yield convert_chat_messages_to_gradio_format(messages)
def user_message(msg: str, history: list, doc_text: str) -> tuple[str, list]:
    """
    Append the user's question to the chat history, prefixed with the
    converted document text (when any) as context.

    Returns ("", history) so the bound textbox is cleared on submit.
    """
    has_context = bool(doc_text.strip())
    user_query = (
        f"๋‹ค์Œ ๋ฌธ์„œ๋ฅผ ์ฐธ๊ณ ํ•˜์—ฌ ๋‹ต๋ณ€:\n\n{doc_text}\n\n์งˆ๋ฌธ: {msg}"
        if has_context
        else msg
    )
    history.append(ChatMessage(role="user", content=user_query))
    return "", history
def reset_states(file_a, file_b):
    """
    Reset chat_history, md_state and the chatbot view when a new file is
    uploaded. The file arguments are unused; they exist only so this can be
    wired directly to the upload components' change events.
    """
    empty_history, empty_markdown, empty_chat = [], "", ""
    return empty_history, empty_markdown, empty_chat
###############################
# UI assembly
###############################
if __name__ == "__main__":
    with gr.Blocks(title="Compare RAG CHAT", css=create_css()) as demo:
        with gr.Tab("PDF Chat with LLM"):
            gr.HTML("""
<div class="title-area">
    <h1>VisionOCR</h1>
    <p>๋‘ ๊ฐœ์˜ PDF/์ด๋ฏธ์ง€/ํ…์ŠคํŠธ/CSV ํŒŒ์ผ์„ ์—…๋กœ๋“œํ•˜์—ฌ A/B ๋น„๊ต ํ›„, ์ถ”๋ก  LLM๊ณผ ๋Œ€ํ™”ํ•ฉ๋‹ˆ๋‹ค.<br>
    ํ•œ ํŒŒ์ผ๋งŒ ์—…๋กœ๋“œํ•˜๋ฉด ํ•ด๋‹น ํŒŒ์ผ๋กœ ๋ถ„์„ํ•ฉ๋‹ˆ๋‹ค.</p>
</div>
""")
            # Visible markdown component that receives the conversion result.
            conversion_md = gr.Markdown(label="๋ณ€ํ™˜ ๊ฒฐ๊ณผ", visible=True)
            md_state = gr.State("")      # internal state (converted document text)
            chat_history = gr.State([])  # list of ChatMessage objects
            # Visible chatbot component.
            chatbot = gr.Chatbot(visible=True)
            with gr.Row():
                file_a = gr.File(label="๋ฌธ์„œ A ์—…๋กœ๋“œ", file_types=[".pdf", ".png", ".jpeg", ".jpg", ".txt", ".csv"], interactive=True)
                file_b = gr.File(label="๋ฌธ์„œ B ์—…๋กœ๋“œ", file_types=[".pdf", ".png", ".jpeg", ".jpg", ".txt", ".csv"], interactive=True)
            convert_btn = gr.Button("๋น„๊ต์šฉ ๋ณ€ํ™˜ํ•˜๊ธฐ")
            # Reset chat/conversion state whenever either upload changes.
            file_a.change(
                fn=reset_states,
                inputs=[file_a, file_b],
                outputs=[chat_history, md_state, chatbot]
            )
            file_b.change(
                fn=reset_states,
                inputs=[file_a, file_b],
                outputs=[chat_history, md_state, chatbot]
            )
            # Hidden conversion options — fixed defaults, not user-adjustable.
            max_pages = gr.Slider(1, 20, 10, visible=False)
            layout_mode = gr.Dropdown(["layoutlmv3", "doclayout_yolo"], value="doclayout_yolo", visible=False)
            language = gr.Dropdown(all_lang, value='auto', visible=False)
            formula_enable = gr.Checkbox(value=True, visible=False)
            is_ocr = gr.Checkbox(value=False, visible=False)
            table_enable = gr.Checkbox(value=True, visible=False)
            convert_btn.click(
                fn=to_markdown_comparison,
                inputs=[file_a, file_b, max_pages, is_ocr, layout_mode, formula_enable, table_enable, language],
                outputs=conversion_md,
                show_progress=True
            )
            gr.Markdown("## ์ถ”๋ก  LLM๊ณผ ๋Œ€ํ™”")
            gr.Markdown(
                "### ๋น„๊ต ์˜ˆ์ œ:\n"
                "- ๋‘ ํŒŒ์ผ์„ ๋น„๊ตํ•˜์—ฌ ๋‚ด์šฉ์ƒ์˜ ์ฐจ์ด์ ์„ ์ƒ์„ธํ•˜๊ฒŒ ์„ค๋ช…ํ•˜๋ผ.\n"
                "- ๋‘ ํŒŒ์ผ์„ ๋น„๊ตํ•˜์—ฌ ์–ด๋А ๊ฒƒ์ด ๋” ์šฐ์ˆ˜ํ•œ ์ œ์•ˆ์ด๋‚˜ ๋‚ด์šฉ์ธ์ง€ ์„ค๋ช…ํ•˜๋ผ.\n"
                "- ๋‘ ๋ฌธ์„œ ๊ฐ„์˜ ๋…ผ๋ฆฌ์  ๊ตฌ์„ฑ ๋ฐ ์ฃผ์ œ์˜ ์ฐจ์ด์ ์„ ๋ถ„์„ํ•˜๋ผ.\n"
                "- ๋‘ ๋ฌธ์„œ์˜ ์Šคํƒ€์ผ๊ณผ ํ‘œํ˜„ ๋ฐฉ์‹์˜ ์ฐจ์ด๋ฅผ ๋น„๊ตํ•˜๋ผ."
            )
            with gr.Row():
                chat_input = gr.Textbox(lines=1, placeholder="์งˆ๋ฌธ์„ ์ž…๋ ฅํ•˜์„ธ์š”...")
                clear_btn = gr.Button("๋Œ€ํ™” ์ดˆ๊ธฐํ™”")
            # NOTE(review): user_message clears chat_input first, so the .then()
            # step receives an empty string for chat_input and
            # stream_gemini_response substitutes its placeholder; the actual
            # question reaches the model only via chat_history — confirm intended.
            chat_input.submit(
                fn=user_message,
                inputs=[chat_input, chat_history, conversion_md],
                outputs=[chat_input, chat_history]
            ).then(
                fn=stream_gemini_response,
                inputs=[chat_input, chat_history],
                outputs=chatbot
            )
            def clear_all():
                # Clear chat history, converted-document state, and the chatbot view.
                return [], "", ""
            clear_btn.click(
                fn=clear_all,
                inputs=[],
                outputs=[chat_history, md_state, chatbot]
            )
    demo.launch(server_name="0.0.0.0", server_port=7860, debug=True, ssr_mode=True)