Delete chatbot.py

chatbot.py
DELETED  +0 -108
@@ -1,108 +0,0 @@
# chatbot.py
import streamlit as st
from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration
import torch
from typing import List, Dict


class ChatbotManager:
    def __init__(self):
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        self.model = None
        self.tokenizer = None
        self.load_model()
        self.initialize_chat()

    def load_model(self):
        """Load Blenderbot model locally"""
        try:
            with st.spinner("Loading AI model (this may take a minute)..."):
                model_name = "facebook/blenderbot-400M-distill"
                self.tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
                self.model = BlenderbotForConditionalGeneration.from_pretrained(model_name).to(self.device)
            st.success("Model loaded successfully!")
        except Exception as e:
            st.error(f"⚠️ Failed to load model: {str(e)}")
            self.model = None

    def initialize_chat(self):
        """Initialize chat session state"""
        if "chat_history" not in st.session_state:
            st.session_state.chat_history = []

    def clear_chat(self):
        """Reset chat history"""
        st.session_state.chat_history = []
        st.success("Chat history cleared!")

    def add_message(self, role: str, content: str):
        """Add a message to chat history"""
        st.session_state.chat_history.append({"role": role, "content": content})

    def get_chat_history(self) -> List[Dict]:
        """Retrieve chat history"""
        return st.session_state.chat_history

    def generate_response(self, prompt: str) -> str:
        """Generate AI response using Blenderbot"""
        if not self.model:
            return "Model not loaded. Please try again later."

        try:
            # Format prompt with business context
            business_prompt = f"""You are a professional business advisor. Provide helpful, concise advice on:
- Business strategy
- Marketing
- Product development
- Startup growth

User Question: {prompt}

Answer:"""

            # Truncate rather than overflow the model's position embeddings
            # on long prompts
            inputs = self.tokenizer([business_prompt], return_tensors="pt", truncation=True).to(self.device)
            reply_ids = self.model.generate(**inputs, max_length=200)
            response = self.tokenizer.decode(reply_ids[0], skip_special_tokens=True)

            return response
        except Exception as e:
            return f"⚠️ Error generating response: {str(e)}"

    def render_chat_interface(self):
        """Render the complete chat UI"""
        st.header("💬 AI Business Mentor (Blenderbot)")

        # Sidebar controls
        with st.sidebar:
            st.subheader("Settings")
            if st.button("Clear Chat"):
                self.clear_chat()
                st.rerun()

            st.markdown("---")
            st.caption("Model: facebook/blenderbot-400M-distill")
            st.caption(f"Device: {self.device.upper()}")

        # Display chat history
        for message in self.get_chat_history():
            with st.chat_message(message["role"]):
                st.markdown(message["content"])

        # User input
        if prompt := st.chat_input("Ask about business..."):
            self.add_message("user", prompt)

            # Display user message immediately
            with st.chat_message("user"):
                st.markdown(prompt)

            # Generate and display AI response
            with st.chat_message("assistant"):
                with st.spinner("Thinking..."):
                    response = self.generate_response(prompt)
                st.markdown(response)

            # Add response to history
            self.add_message("assistant", response)

            # Auto-refresh to show new messages
            st.rerun()
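
For context on what was removed: the module exposed a single ChatbotManager class, so the Space presumably drove it from a small Streamlit entry script along these lines (the file name and wiring below are assumptions, not part of this commit):

# app.py -- hypothetical entry point, shown only to illustrate how chatbot.py was used
import streamlit as st
from chatbot import ChatbotManager

st.set_page_config(page_title="AI Business Mentor", page_icon="💬")

manager = ChatbotManager()        # loads the model and initializes session state
manager.render_chat_interface()   # sidebar, history, chat input, and response loop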
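
One weakness of the deleted design: Streamlit re-runs the whole script on every interaction, so instantiating ChatbotManager at the top of the script reloads the 400M-parameter model on each rerun. A minimal sketch of the usual remedy, assuming the same model, caches the load with st.cache_resource (the module and helper names here are hypothetical):

# model_cache.py -- hypothetical helper, not part of the deleted file
import streamlit as st
import torch
from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration

MODEL_NAME = "facebook/blenderbot-400M-distill"

@st.cache_resource(show_spinner="Loading AI model (this may take a minute)...")
def load_blenderbot():
    """Load tokenizer and model once per process instead of once per rerun."""
    device = "cuda" if torch.cuda.is_available() else "cpu"
    tokenizer = BlenderbotTokenizer.from_pretrained(MODEL_NAME)
    model = BlenderbotForConditionalGeneration.from_pretrained(MODEL_NAME).to(device)
    return tokenizer, model, device

load_model would then reduce to a call to load_blenderbot(), and clearing the chat or sending a message would no longer pay the model-load cost.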
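
Also worth noting: blenderbot-400M-distill is an open-domain chit-chat model with a 128-token input window, so much of the "business advisor" preamble in generate_response is likely truncated away before the model sees it. A quick offline check of the raw model behavior, independent of Streamlit, might look like this (the script name is an assumption):

# smoke_test.py -- hypothetical standalone check, not part of the Space
from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration

name = "facebook/blenderbot-400M-distill"
tokenizer = BlenderbotTokenizer.from_pretrained(name)
model = BlenderbotForConditionalGeneration.from_pretrained(name)

question = "What is one low-budget way to market a new bakery?"
inputs = tokenizer([question], return_tensors="pt", truncation=True)  # capped at the 128-token window
reply_ids = model.generate(**inputs, max_length=200)
print(tokenizer.decode(reply_ids[0], skip_special_tokens=True))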