import gradio as gr
import os
import requests
import tempfile

# Hugging Face Inference API configuration; HF_API_TOKEN is None if HF_TOKEN is unset.
HF_API_TOKEN = os.getenv("HF_TOKEN")
MODEL_ID = "rohitnagareddy/Qwen3-0.6B-Coding-Finetuned-v1"
API_URL = f"https://api-inference.huggingface.co/models/{MODEL_ID}"
HEADERS = {"Authorization": f"Bearer {HF_API_TOKEN}"}


def query_hf_api(prompt):
    """Send the prompt to the hosted model and return the generated text."""
    response = requests.post(API_URL, headers=HEADERS, json={"inputs": prompt})
    if response.status_code == 200:
        return response.json()[0]["generated_text"]
    return f"[Error] {response.status_code}: {response.text}"


def chat_fn(prompt, chat_history):
    """Append the user message and the model reply to the running history."""
    response = query_hf_api(prompt)
    chat_history.append({"role": "user", "content": prompt})
    chat_history.append({"role": "assistant", "content": response})
    # First output feeds the Chatbot display, second updates the State.
    return chat_history, chat_history


def save_chat(chat_history):
    """Write the conversation to a temp file and expose it for download."""
    with tempfile.NamedTemporaryFile(
        delete=False, suffix=".txt", mode="w", encoding="utf-8"
    ) as f:
        for entry in chat_history:
            f.write(f"{entry['role'].capitalize()}: {entry['content']}\n\n")
    # Set the file path and unhide the File component so the download link is clickable.
    return gr.update(value=f.name, visible=True)


with gr.Blocks(theme=gr.themes.Monochrome()) as demo:
    gr.Markdown("# 🤖 Qwen3 Coding Chatbot (Gradio + HF API)")

    with gr.Row():
        clear = gr.Button("🧹 Clear Chat")
        download_btn = gr.Button("⬇️ Download Chat")

    chat = gr.Chatbot(label="Qwen Chat", type="messages")
    msg = gr.Textbox(label="Your message", placeholder="Ask me something...")
    submit = gr.Button("🚀 Send")
    history = gr.State([])
    download_file = gr.File(label="Download", visible=False)

    # Send on button click or Enter.
    submit.click(chat_fn, [msg, history], [chat, history])
    msg.submit(chat_fn, [msg, history], [chat, history])
    # Reset both the visible chat and the stored history.
    clear.click(lambda: ([], []), None, [chat, history])
    download_btn.click(save_chat, [history], download_file)

# Basic auth with placeholder credentials; replace before deploying.
demo.launch(auth=[("admin", "securepass")])
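

# ---------------------------------------------------------------------------
# Optional extras (sketches, not part of the original app)
# ---------------------------------------------------------------------------
# Running locally (the filename app.py and the token value are placeholders):
#   pip install gradio requests
#   export HF_TOKEN=hf_your_token_here
#   python app.py
#
# The serverless Inference API also accepts a "parameters" object next to
# "inputs". The variant below is a minimal sketch of how generation settings
# could be passed; the values (max_new_tokens=512, temperature=0.7) are
# illustrative assumptions, and return_full_text=False asks the API not to
# echo the prompt back in the completion. If prompt echoing is an issue, a
# function along these lines could replace query_hf_api above.
def query_hf_api_with_params(prompt, max_new_tokens=512, temperature=0.7):
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": max_new_tokens,
            "temperature": temperature,
            "return_full_text": False,
        },
    }
    response = requests.post(API_URL, headers=HEADERS, json=payload)
    if response.status_code == 200:
        return response.json()[0]["generated_text"]
    return f"[Error] {response.status_code}: {response.text}"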