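"""Data Science Mentor: a Streamlit chat app.

Picks a Hugging Face chat model per topic, answers questions at the chosen
experience level, and lets the user download the conversation as a PDF.
"""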
import streamlit as st
# NOTE: assumes the langchain-huggingface partner package is installed;
# ChatHuggingFace wraps an LLM client rather than taking model kwargs directly.
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
from langchain_core.messages import SystemMessage, HumanMessage
from fpdf import FPDF  # requires fpdf2 (supports writing to file-like objects)
import io
st.set_page_config(page_title="Data Science Mentor", layout="wide")
# Cache LangChain models per topic
@st.cache_resource
def load_langchain_model(topic):
    # Map each topic to a Hugging Face repo; topics without a dedicated model
    # (Power BI, Machine Learning, Deep Learning) fall back to Falcon.
    repo_ids = {
        "Python": "tiiuae/falcon-7b-instruct",
        "GenAI": "google/flan-t5-large",
        "Statistics": "databricks/dolly-v2-3b",
        "SQL": "google/flan-t5-base",
    }
    repo_id = repo_ids.get(topic, "tiiuae/falcon-7b-instruct")
    # Generation runs on the Hugging Face Inference API, so no local torch
    # device selection is needed here.
    llm = HuggingFaceEndpoint(
        repo_id=repo_id,
        temperature=0.6,
        max_new_tokens=256,
        task="conversational",
    )
    return ChatHuggingFace(llm=llm)
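# Example (hypothetical): load_langchain_model("Python") returns a chat model
# whose .invoke() accepts a list of LangChain messages and returns an AIMessage.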
def generate_answer(model, topic, level, question):
    system_prompt = f"You are a {level} level mentor in {topic}. Answer the user's question accordingly."
    messages = [
        SystemMessage(content=system_prompt),
        HumanMessage(content=question),
    ]
    response = model.invoke(messages)
    return response.content
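# Example (hypothetical inputs):
#   generate_answer(model, "SQL", "Beginner", "What is a JOIN?")
# returns the mentor's reply as a plain string.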
def create_pdf(chat_history):
    pdf = FPDF()
    pdf.add_page()
    pdf.set_auto_page_break(auto=True, margin=15)
    pdf.set_font("Arial", size=12)
    pdf.cell(0, 10, "Data Science Mentor Chat History", ln=True, align='C')
    pdf.ln(10)
    # chat_history is stored as alternating ("You", msg) / ("Mentor", msg) pairs.
    for i in range(0, len(chat_history), 2):
        user_msg = chat_history[i][1]
        mentor_msg = chat_history[i + 1][1] if i + 1 < len(chat_history) else ""
        pdf.set_font("Arial", 'B', 12)
        pdf.multi_cell(0, 10, f"You: {user_msg}")
        pdf.set_font("Arial", '', 12)
        pdf.multi_cell(0, 10, f"Mentor: {mentor_msg}")
        pdf.ln(5)
    # fpdf2 can write straight into a file-like object; the classic PyFPDF
    # API would need pdf.output(dest='S') instead.
    pdf_buffer = io.BytesIO()
    pdf.output(pdf_buffer)
    pdf_buffer.seek(0)
    return pdf_buffer
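# The returned BytesIO can be handed directly to st.download_button below.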
# --- Streamlit UI ---
st.title("🤖 Data Science Mentor")
with st.sidebar:
    st.header("Configure Your Mentor")
    topic = st.radio("Select Topic:", ["Python", "GenAI", "Statistics", "Power BI", "SQL", "Machine Learning", "Deep Learning"])
    level = st.radio("Select Experience Level:", ["Beginner", "Intermediate", "Advanced"])

# Load LangChain model for selected topic
model = load_langchain_model(topic)
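# st.cache_resource keys the loader on `topic`, so each model is built once
# and then reused across reruns and topic switches.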
if "chat_history" not in st.session_state:
st.session_state.chat_history = []
st.subheader(f"Ask your {topic} question:")
user_input = st.text_area("Type your question here:", height=100)
if st.button("Get Answer"):
if user_input.strip() == "":
st.warning("Please enter a question.")
else:
with st.spinner("Mentor is thinking..."):
answer = generate_answer(model, topic, level, user_input)
st.session_state.chat_history.append(("You", user_input))
st.session_state.chat_history.append(("Mentor", answer))
# Display chat history
if st.session_state.chat_history:
    for i in range(0, len(st.session_state.chat_history), 2):
        user_msg = st.session_state.chat_history[i][1]
        mentor_msg = st.session_state.chat_history[i + 1][1] if i + 1 < len(st.session_state.chat_history) else ""
        st.markdown(f"**You:** {user_msg}")
        st.markdown(f"**Mentor:** {mentor_msg}")
        st.markdown("---")
# PDF Download Button
if st.session_state.chat_history:
    # Rendering st.download_button directly (rather than nesting it under a
    # regular st.button) keeps the download available across reruns.
    pdf_bytes = create_pdf(st.session_state.chat_history)
    st.download_button(
        label="Download Chat as PDF",
        data=pdf_bytes,
        file_name="chat_history.pdf",
        mime="application/pdf"
    )
if st.button("Clear Chat"):
st.session_state.chat_history = []