import streamlit as st
import os
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_core.messages import HumanMessage, SystemMessage

# Load the Hugging Face API token from the environment (stored in the 'Data_science' variable)
hf = os.getenv('Data_science')
if not hf:
    raise ValueError("Hugging Face token not found. Set the 'Data_science' environment variable.")
os.environ['HUGGINGFACEHUB_API_TOKEN'] = hf
os.environ['HF_TOKEN'] = hf

# --- Page Configuration ---
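# set_page_config should be the first Streamlit command in the script.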
st.set_page_config(page_title="🤖 GenAI Mentor Chat", page_icon="🧬", layout="centered")

# --- Custom CSS Styling ---
st.markdown("""
    <style>
    .main {
        background: linear-gradient(135deg, #3e32a8 0%, #80ffe0 100%);
        padding: 2rem;
        font-family: 'Segoe UI', sans-serif;
    }
    h1, h2, h3, h4, h5, h6, p, label, .css-10trblm, .css-q8sbsg {
        color: #ffffff !important;
        text-align: center;
    }
    .stTextInput > div > div > input {
        background-color: rgba(255, 255, 255, 0.1);
        color: white;
        border: 1px solid rgba(255, 255, 255, 0.5);
        border-radius: 8px;
        padding: 0.6em;
    }
    .stTextInput > div > div > input::placeholder {
        color: rgba(255, 255, 255, 0.6);
    }
    .stButton>button {
        background: rgba(255, 255, 255, 0.15);
        border: 2px solid rgba(255, 255, 255, 0.4);
        color: white;
        font-size: 18px;
        font-weight: bold;
        padding: 0.8em 1.2em;
        border-radius: 12px;
        width: 100%;
        transition: all 0.3s ease;
        box-shadow: 0 4px 12px rgba(0, 0, 0, 0.25);
    }
    .stButton>button:hover {
        background: rgba(255, 255, 255, 0.3);
        border-color: white;
        color: white;
    }
    .stSidebar > div:first-child {
        background: #2c3e50;
        padding: 1rem;
        border-radius: 0 15px 15px 0;
    }
    .stSidebar h1, .stSidebar h2, .stSidebar h3, .stSidebar label, .stSidebar p {
        color: white !important;
    }
    hr {
        border: 1px solid rgba(255, 255, 255, 0.3);
        margin: 2em 0;
    }
    </style>
""", unsafe_allow_html=True)

# --- Title ---
st.title("🤖 GenAI Mentor Chat")
st.markdown("### 💬 Ask your questions about Generative AI, LLMs, or foundational models!")

# --- Sidebar: Experience Selection ---
st.sidebar.title("🧑‍🏫 Mentor Preferences")
experience_label = st.sidebar.selectbox("🎓 Choose your experience level:", ["Beginner", "Intermediate", "Expert"])

# --- Initialize LLM ---
# HuggingFaceEndpoint talks to the hosted model; ChatHuggingFace wraps it with the
# chat-message interface used below, so the generation settings only need to be set once.
genai_skeleton = HuggingFaceEndpoint(
    repo_id='google/gemma-2-9b-it',
    provider='nebius',
    temperature=0.7,
    max_new_tokens=256,  # enough room for the ~150-word answers the system prompt asks for
    task='conversational'
)

genai_chat = ChatHuggingFace(llm=genai_skeleton)

PAGE_KEY = "genai_chat_history"

# --- Session Initialization ---
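# Streamlit re-runs the whole script on every interaction, so the chat history
# is kept in st.session_state to persist across reruns.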
if PAGE_KEY not in st.session_state:
    st.session_state[PAGE_KEY] = []

# --- Chat Input Form ---
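# Widget changes inside st.form do not trigger a rerun until the Send button is pressed.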
with st.form(key="chat_form"):
    user_input = st.text_input("🧠 Ask your GenAI question:", placeholder="e.g. How does a diffusion model generate images?")
    submit = st.form_submit_button("🚀 Send")

# --- Chat Handling Logic ---
if submit and user_input:
    system_prompt = (
        f"Act as a Generative AI mentor for a {experience_label.lower()}-level learner. "
        "Answer only GenAI-related questions. Be friendly and concise (under 150 words). "
        "Politely say so if a question falls outside the scope of GenAI."
    )
    messages = [SystemMessage(content=system_prompt), HumanMessage(content=user_input)]
    result = genai_chat.invoke(messages)
    st.session_state[PAGE_KEY].append((user_input, result.content))

# --- Display Chat History ---
st.subheader("📜 Chat History")
for user, bot in st.session_state[PAGE_KEY]:
    st.markdown(f"**πŸ§‘ You:** {user}")
    st.markdown(f"**πŸ€– Mentor:** {bot}")
    st.markdown("---")