|
import gradio as gr |
|
from huggingface_hub import InferenceClient, HfApi |
|
import os |
|
import requests |
|
from typing import List, Dict, Union, Tuple |
|
import traceback |
|
from PIL import Image |
|
from io import BytesIO |
|
import asyncio |
|
from gradio_client import Client |
|
import time |
|
import threading |
|
import json |
|
import re |
|
|
|
# Hugging Face access token read from the environment; may be None when unset
# (get_headers() raises in that case, the clients below tolerate token=None).
HF_TOKEN = os.getenv("HF_TOKEN")

# Chat-completion client pinned to the Cohere Command R+ (08-2024) model.
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)

# Hub API client, used to list the files of a Space repository.
hf_api = HfApi(token=HF_TOKEN)
|
|
|
def get_headers():
    """Return the Authorization header for Hugging Face HTTP requests.

    Raises:
        ValueError: if no token was found in the environment at import time.
    """
    if HF_TOKEN:
        return {"Authorization": f"Bearer {HF_TOKEN}"}
    raise ValueError("Hugging Face token not found in environment variables")
|
|
|
def get_file_content(space_id: str, file_path: str, timeout: float = 30.0) -> str:
    """Fetch the raw text of one file from a Hugging Face Space repo.

    Args:
        space_id: "owner/space" identifier.
        file_path: path of the file inside the repo.
        timeout: per-request timeout in seconds (new parameter with a default,
            so existing callers are unaffected).

    Returns:
        The file content on HTTP 200; otherwise a human-readable error string
        (callers display whatever is returned, so errors are reported in-band).
    """
    file_url = f"https://huggingface.co/spaces/{space_id}/raw/main/{file_path}"
    try:
        # FIX: the original call had no timeout, so a stalled connection could
        # block the UI forever. requests.Timeout subclasses RequestException,
        # so the existing error path below already covers it.
        response = requests.get(file_url, headers=get_headers(), timeout=timeout)
        if response.status_code == 200:
            return response.text
        return f"File not found or inaccessible: {file_path}"
    except requests.RequestException:
        return f"Error fetching content for file: {file_path}"
|
|
|
def get_space_structure(space_id: str) -> Dict:
    """Build a nested directory tree for all files in a Space repository.

    Returns a node dict of the form
    {"type": "directory"|"file", "path": ..., "name": ..., "children": [...]}
    rooted at the Space itself, or {"error": ...} when the Hub call fails.
    """
    try:
        files = hf_api.list_repo_files(repo_id=space_id, repo_type="space")

        root = {"type": "directory", "path": "", "name": space_id, "children": []}
        for file in files:
            parts = file.split('/')
            node = root
            # Walk (creating as needed) each intermediate directory, then
            # attach the file itself as a leaf on the last segment.
            for depth, segment in enumerate(parts):
                if depth == len(parts) - 1:
                    node["children"].append({"type": "file", "path": file, "name": segment})
                    continue
                existing = next(
                    (child for child in node["children"]
                     if child["type"] == "directory" and child["name"] == segment),
                    None,
                )
                if existing is None:
                    existing = {
                        "type": "directory",
                        "path": '/'.join(parts[:depth + 1]),
                        "name": segment,
                        "children": [],
                    }
                    node["children"].append(existing)
                node = existing

        return root
    except Exception as e:
        print(f"Error in get_space_structure: {str(e)}")
        return {"error": f"API request error: {str(e)}"}
|
|
|
def format_tree_structure(tree_data: Dict, indent: str = "") -> str:
    """Render the tree produced by get_space_structure() as indented text.

    Error dicts ({"error": ...}) are passed through as the bare message.
    Directories list before files, each group alphabetically by name.
    """
    if "error" in tree_data:
        return tree_data["error"]

    icon = '๐' if tree_data.get('type') == 'directory' else '๐'
    pieces = [f"{indent}{icon} {tree_data.get('name', 'Unknown')}\n"]
    if tree_data.get("type") == "directory":
        # Sort key: directories first (False < True), then by name.
        ordered = sorted(
            tree_data.get("children", []),
            key=lambda x: (x.get("type", "") != "directory", x.get("name", "")),
        )
        for child in ordered:
            pieces.append(format_tree_structure(child, indent + "  "))
    return "".join(pieces)
|
|
|
def summarize_code(app_content: str):
    """Ask the chat model for a <=3-line Korean summary of the given code.

    Returns the model's reply text, or an in-band error string on failure.
    NOTE(review): the Korean prompt literals below are mojibake-damaged in the
    source view; line breaks inserted by the corruption were rejoined so the
    literals stay valid Python. Otherwise they are left as found.
    """
    system_message = "๋น์ ์ Python ์ฝ๋๋ฅผ ๋ถ์ํ๊ณ ์์ฝํ๋ AI ์กฐ์์๋๋ค. ์ฃผ์ด์ง ์ฝ๋๋ฅผ 3์ค ์ด๋ด๋ก ๊ฐ๊ฒฐํ๊ฒ ์์ฝํด์ฃผ์ธ์."

    user_message = f"๋ค์ Python ์ฝ๋๋ฅผ 3์ค ์ด๋ด๋ก ์์ฝํด์ฃผ์ธ์:\n\n{app_content}"

    # Single system + user turn; no chat history is kept for summarization.
    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        # Small completion budget: summaries are capped at 200 tokens.
        response = hf_client.chat_completion(messages, max_tokens=200, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        return f"์์ฝ ์์ฑ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
|
|
def analyze_code(app_content: str):
    """Ask the chat model for a structured (A-E) Markdown analysis of the code.

    Returns the model's reply text, or an in-band error string on failure.
    NOTE(review): the Korean prompt text is mojibake-damaged in the source
    view; the triple-quoted literal is kept exactly as found, since newlines
    are legal inside it.
    """
    system_message = """๋น์ ์ Python ์ฝ๋๋ฅผ ๋ถ์ํ๋ AI ์กฐ์์
๋๋ค. ์ฃผ์ด์ง ์ฝ๋๋ฅผ ๋ถ์ํ์ฌ ๋ค์ ํญ๋ชฉ์ ๋ํด ์ค๋ช
ํด์ฃผ์ธ์:

A. ๋ฐฐ๊ฒฝ ๋ฐ ํ์์ฑ

B. ๊ธฐ๋ฅ์ ํจ์ฉ์ฑ ๋ฐ ๊ฐ์น

C. ํน์ฅ์ 

D. ์ ์ฉ ๋์ ๋ฐ ํ๊ฒ

E. ๊ธฐ๋ํจ๊ณผ

๊ธฐ์กด ๋ฐ ์ ์ฌ ํ๋ก์ ํธ์ ๋น๊ตํ์ฌ ๋ถ์ํด์ฃผ์ธ์. Markdown ํ์์ผ๋ก ์ถ๋ ฅํ์ธ์."""

    user_message = f"๋ค์ Python ์ฝ๋๋ฅผ ๋ถ์ํด์ฃผ์ธ์:\n\n{app_content}"

    # Single system + user turn; no chat history for the analysis call.
    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        # Larger budget than summarize_code: full analysis gets 1000 tokens.
        response = hf_client.chat_completion(messages, max_tokens=1000, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        return f"๋ถ์ ์์ฑ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
|
|
def explain_usage(app_content: str):
    """Ask the chat model for a Markdown usage guide for the given app code.

    Returns the model's reply text, or an in-band error string on failure.
    NOTE(review): the Korean literals below are mojibake-damaged in the source
    view; corruption-inserted line breaks were rejoined so the double-quoted
    literals stay valid Python. Otherwise they are left as found.
    """
    system_message = "๋น์ ์ Python ์ฝ๋๋ฅผ ๋ถ์ํ์ฌ ์ฌ์ฉ๋ฒ์ ์ค๋ชํ๋ AI ์กฐ์์๋๋ค. ์ฃผ์ด์ง ์ฝ๋๋ฅผ ๋ฐํ์ผ๋ก ๋ง์น ํ๋ฉด์ ๋ณด๋ ๊ฒ์ฒ๋ผ ์ฌ์ฉ๋ฒ์ ์์ธํ ์ค๋ชํด์ฃผ์ธ์. Markdown ํ์์ผ๋ก ์ถ๋ ฅํ์ธ์."

    user_message = f"๋ค์ Python ์ฝ๋์ ์ฌ์ฉ๋ฒ์ ์ค๋ชํด์ฃผ์ธ์:\n\n{app_content}"

    # Single system + user turn; no chat history for the usage call.
    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_message}
    ]

    try:
        # Mid-size budget: usage explanations are capped at 800 tokens.
        response = hf_client.chat_completion(messages, max_tokens=800, temperature=0.7)
        return response.choices[0].message.content
    except Exception as e:
        return f"์ฌ์ฉ๋ฒ ์ค๋ช์์ฑ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
|
|
|
|
|
|
def analyze_space(url: str, progress=gr.Progress()):
    """Run the full analysis pipeline for a Hugging Face Space URL.

    Args:
        url: full Space URL or a bare "owner/space" id (anything before the
            last 'spaces/' is discarded).
        progress: Gradio progress tracker (default instance is the documented
            Gradio pattern for progress reporting).

    Returns:
        A 7-tuple matching the Gradio outputs: (app.py content, tree view
        text, tree dict, space_id, summary, analysis, usage). On any failure,
        an error message plus empty placeholders of the same arity.

    NOTE(review): Korean progress/error strings are mojibake-damaged in the
    source view; one corruption-inserted line break was rejoined (0.9 step).
    """
    try:
        # Works for both full URLs and bare ids: split on 'spaces/' and take
        # the trailing part.
        space_id = url.split('spaces/')[-1]

        if not re.match(r'^[\w.-]+/[\w.-]+$', space_id):
            raise ValueError(f"Invalid Space ID format: {space_id}")

        progress(0.1, desc="ํ์ผ ๊ตฌ์กฐ ๋ถ์ ์ค...")
        tree_structure = get_space_structure(space_id)
        # get_space_structure signals failure in-band via an {"error": ...}
        # dict; convert that into an exception so the except path runs.
        if "error" in tree_structure:
            raise ValueError(tree_structure["error"])
        tree_view = format_tree_structure(tree_structure)

        progress(0.3, desc="app.py ๋ด์ฉ ๊ฐ์ ธ์ค๋ ์ค...")
        app_content = get_file_content(space_id, "app.py")

        progress(0.5, desc="์ฝ๋ ์์ฝ ์ค...")
        summary = summarize_code(app_content)

        progress(0.7, desc="์ฝ๋ ๋ถ์ ์ค...")
        analysis = analyze_code(app_content)

        progress(0.9, desc="์ฌ์ฉ๋ฒ ์ค๋ช์์ฑ ์ค...")
        usage = explain_usage(app_content)

        progress(1.0, desc="์๋ฃ")
        return app_content, tree_view, tree_structure, space_id, summary, analysis, usage
    except Exception as e:
        print(f"Error in analyze_space: {str(e)}")
        print(traceback.format_exc())
        # Same arity as the success tuple so Gradio output wiring still works.
        return f"์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}", "", None, "", "", "", ""
|
|
|
|
|
|
|
|
|
def respond(message: str, chat_history: List[Dict[str, str]], system_message: str, max_tokens: int, temperature: float, top_p: float):
    """Generate one assistant reply for the chat tab.

    Args:
        message: the new user message.
        chat_history: prior turns as {"role": ..., "content": ...} dicts
            (Gradio Chatbot "messages" format).
        system_message: system prompt text.
        max_tokens / temperature / top_p: sampling parameters forwarded to
            the inference client.

    Returns:
        The model's reply text, or an in-band error string on failure.
    """
    messages = [{"role": "system", "content": system_message}]
    # BUG FIX: the original appended {"content": chat["role"] == "user"} — a
    # boolean produced by comparing the role string — instead of the turn's
    # actual text, and emitted a bogus user+assistant pair for every single
    # history entry. Forward each prior turn with its real role and content.
    for chat in chat_history:
        messages.append({"role": chat["role"], "content": chat["content"]})
    messages.append({"role": "user", "content": message})

    try:
        response = hf_client.chat_completion(messages, max_tokens=max_tokens, temperature=temperature, top_p=top_p)
        return response.choices[0].message.content
    except Exception as e:
        return f"์๋ต ์์ฑ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
|
|
def create_ui():
    """Build and return the Gradio Blocks application.

    Layout: an analysis tab (Space URL input; summary / analysis / usage /
    file-tree outputs; a code-viewer column with app.py and requirements.txt
    tabs) plus an AI chat tab. Any construction error is logged with a full
    traceback and re-raised.

    NOTE(review): mojibake-damaged Korean string literals had corruption-
    inserted line breaks rejoined so they remain valid Python; they are
    otherwise left byte-for-byte as found.
    """
    try:
        # The CSS is passed verbatim to gr.Blocks; the embedded /* */ comments
        # are part of the runtime string, not Python comments.
        css = """
footer {visibility: hidden;}
.output-group {
    border: 1px solid #ddd;
    border-radius: 5px;
    padding: 10px;
    margin-bottom: 20px;
}
.scroll-lock {
    overflow-y: auto !important;
    max-height: calc((100vh - 200px) / 5) !important;
}
.full-height {
    height: calc(100vh - 200px) !important;
    overflow-y: auto !important;
}
.tab-nav button {
    color: #7FFFD4 !important; /* ํ๊ด ๋ฏผํธ ์์ */
}
.tab-nav button.selected {
    color: #FFFF00 !important; /* ๋ฐ์ ํ๊ด ์๋ก์ฐ */
    border-color: #FFFF00 !important;
}
.file-button {
    background-color: #f0f0f0;
    border: 1px solid #ddd;
    padding: 5px 10px;
    margin: 2px 0;
    cursor: pointer;
    text-align: left;
    width: 100%;
}
.file-button:hover {
    background-color: #e0e0e0;
}
"""

        with gr.Blocks(css=css, theme="Nymbo/Nymbo_Theme") as demo:
            gr.Markdown("# HuggingFace Space Analyzer")

            with gr.Tabs(elem_classes="main-tabs") as tabs:
                with gr.TabItem("๋ถ์"):
                    with gr.Row():
                        with gr.Column(scale=6):
                            url_input = gr.Textbox(label="HuggingFace Space URL")
                            analyze_button = gr.Button("๋ถ์")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                summary_output = gr.Markdown(label="์์ฝ (3์ค ์ด๋ด)")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                analysis_output = gr.Markdown(label="๋ถ์")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                usage_output = gr.Markdown(label="์ฌ์ฉ๋ฒ")

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                tree_view_output = gr.Textbox(label="ํ์ผ ๊ตฌ์กฐ (Tree View)", lines=20)

                            with gr.Group(elem_classes="output-group scroll-lock"):
                                # Raw HTML buttons that invoke the openFile()
                                # JS helper injected via gr.HTML below.
                                file_buttons = gr.HTML(label="ํ์ผ ๋ฆฌ์คํธ")

                        with gr.Column(scale=4):
                            with gr.Group(elem_classes="output-group full-height"):
                                code_tabs = gr.Tabs(elem_classes="code-tabs")
                                with code_tabs:
                                    app_py_tab = gr.TabItem("app.py")
                                    with app_py_tab:
                                        app_py_content = gr.Code(language="python", label="app.py", lines=30)
                                    requirements_tab = gr.TabItem("requirements.txt")
                                    with requirements_tab:
                                        requirements_content = gr.Textbox(label="requirements.txt", lines=30)

                with gr.TabItem("AI ์ฝ๋ฉ"):
                    # type="messages": history is a list of role/content
                    # dicts, the same shape respond() consumes.
                    chatbot = gr.Chatbot(label="๋ํ", type="messages")
                    msg = gr.Textbox(label="๋ฉ์์ง")
                    with gr.Row():
                        system_message = gr.Textbox(label="System Message", value="")
                        max_tokens = gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens")
                        temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature")
                        top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P")

                    examples = [
                        ["์์ธํ ์ฌ์ฉ ๋ฐฉ๋ฒ์ ๋ง์น ํ๋ฉด์ ๋ณด๋ฉด์ ์ค๋ชํ๋ฏ์ด 4000 ํ ํฐ ์ด์ ์์ธํ ์ค๋ชํ๋ผ"],
                        ["FAQ 20๊ฑด์ ์์ธํ๊ฒ ์์ฑํ๋ผ. 4000ํ ํฐ ์ด์ ์ฌ์ฉํ๋ผ."],
                        ["์ฌ์ฉ ๋ฐฉ๋ฒ๊ณผ ์ฐจ๋ณ์ , ํน์ง, ๊ฐ์ ์ ์ค์ฌ์ผ๋ก 4000 ํ ํฐ ์ด์ ์ ํ๋ธ ์์ ์คํฌ๋ฆฝํธ ํํ๋ก ์์ฑํ๋ผ"],
                        ["๋ณธ ์๋น์ค๋ฅผ SEO ์ต์ ํํ์ฌ ๋ธ๋ก๊ทธ ํฌ์คํธ(๋ฐฐ๊ฒฝ ๋ฐ ํ์์ฑ, ๊ธฐ์กด ์ ์ฌ ์๋น์ค์ ๋น๊ตํ์ฌ ํน์ฅ์ , ํ์ฉ์ฒ, ๊ฐ์น, ๊ธฐ๋ํจ๊ณผ, ๊ฒฐ๋ก ์ ํฌํจ)๋ก 4000 ํ ํฐ ์ด์ ์์ฑํ๋ผ"],
                        ["ํนํ ์ถ์์ ํ์ฉํ ๊ธฐ์ ๋ฐ ๋น์ฆ๋์ค๋ชจ๋ธ ์ธก๋ฉด์ ํฌํจํ์ฌ ํนํ ์ถ์์ ๊ตฌ์ฑ์ ๋ง๊ฒ ํ์ ์ ์ธ ์ฐฝ์ ๋ฐ๋ช๋ด์ฉ์ ์ค์ฌ์ผ๋ก 4000ํ ํฐ ์ด์ ์์ฑํ๋ผ."],
                        ["๊ณ์ ์ด์ด์ ๋ต๋ณํ๋ผ"],
                    ]

                    gr.Examples(examples, inputs=msg)

                    def respond_wrapper(message, chat_history, system_message, max_tokens, temperature, top_p):
                        # Get the reply first (respond() rebuilds the model
                        # prompt from history), then append both turns and
                        # clear the input textbox by returning "".
                        bot_message = respond(message, chat_history, system_message, max_tokens, temperature, top_p)
                        chat_history.append({"role": "user", "content": message})
                        chat_history.append({"role": "assistant", "content": bot_message})
                        return "", chat_history

                    msg.submit(respond_wrapper, [msg, chatbot, system_message, max_tokens, temperature, top_p], [msg, chatbot])

            # Hidden state carried from the analyze step to the follow-up
            # callbacks wired below.
            space_id_state = gr.State()
            tree_structure_state = gr.State()

            def update_file_buttons(tree_structure, space_id):
                # Render one HTML <button> per file in the analyzed Space.
                if tree_structure is None:
                    return ""

                def get_files(node):
                    # Depth-first collection of file nodes from the tree dict.
                    files = []
                    if node["type"] == "file":
                        files.append(node)
                    elif node["type"] == "directory":
                        for child in node.get("children", []):
                            files.extend(get_files(child))
                    return files

                files = get_files(tree_structure)
                buttons_html = "<div style='display: flex; flex-direction: column;'>"
                for file in files:
                    buttons_html += f"<button class='file-button' onclick='openFile(\"{file['path']}\", \"{space_id}\")'>{file['path']}</button>"
                buttons_html += "</div>"
                return buttons_html

            def open_file(file_path: str, space_id: str):
                # Load the clicked file into the matching code-viewer tab.
                # NOTE(review): gr.Tabs.update / gr.Textbox.update /
                # gr.Code.update are the pre-4.0 Gradio update API — confirm
                # the installed Gradio version still supports them.
                content = get_file_content(space_id, file_path)
                file_name = file_path.split('/')[-1]
                if file_name == "requirements.txt":
                    return gr.Tabs.update(selected="requirements.txt"), gr.Textbox.update(value=content, label=file_name)
                else:
                    return gr.Tabs.update(selected=file_name), gr.Code.update(value=content, language="python" if file_name.endswith('.py') else "plaintext", label=file_name)

            # Pipeline: analyze -> render file buttons -> load requirements.txt.
            analyze_button.click(
                analyze_space,
                inputs=[url_input],
                outputs=[app_py_content, tree_view_output, tree_structure_state, space_id_state, summary_output, analysis_output, usage_output]
            ).then(
                update_file_buttons,
                inputs=[tree_structure_state, space_id_state],
                outputs=[file_buttons]
            ).then(
                lambda space_id: get_file_content(space_id, "requirements.txt"),
                inputs=[space_id_state],
                outputs=[requirements_content]
            )

            # Hidden inputs the injected JS writes into; their change event
            # drives open_file().
            file_path_input = gr.Textbox(visible=False)
            space_id_input = gr.Textbox(visible=False)

            file_path_input.change(
                open_file,
                inputs=[file_path_input, space_id_input],
                outputs=[code_tabs, code_tabs]
            )

            # NOTE(review): these selectors look for
            # data-testid="file_path_input" / "space_id_input", but the hidden
            # Textboxes above set no elem_id/test id — verify the
            # click-to-open wiring actually fires in the browser.
            gr.HTML("""
<script>
function openFile(path, spaceId) {
    const filePathInput = document.querySelector('input[data-testid="file_path_input"]');
    const spaceIdInput = document.querySelector('input[data-testid="space_id_input"]');
    if (filePathInput && spaceIdInput) {
        filePathInput.value = path;
        spaceIdInput.value = spaceId;
        filePathInput.dispatchEvent(new Event('change'));
    }
}
</script>
""")

        return demo

    except Exception as e:
        print(f"Error in create_ui: {str(e)}")
        print(traceback.format_exc())
        raise