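"""AI Mentor Chat.

A small Streamlit app that chats with a Python-mentor persona backed by
meta-llama/Llama-3.2-3B-Instruct (served through the SambaNova provider)
via LangChain's Hugging Face integration.
"""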
import streamlit as st
import os
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_core.messages import HumanMessage, AIMessage, SystemMessage



# Assumes the 'hf' env var (or Space secret) holds a Hugging Face API token.
hf = os.getenv('hf')
if hf:  # only export the token if it is actually set
    os.environ['HUGGINGFACEHUB_API_TOKEN'] = hf
    os.environ['HF_TOKEN'] = hf
# --- Config ---
st.set_page_config(page_title="AI Mentor Chat", layout="centered")
st.title("🤖 AI Mentor Chat")

# --- Sidebar for selections ---
st.sidebar.title("Mentor Preferences")

exp1 = ['<1', '1', '2', '3', '4', '5', '5+']
exp = st.sidebar.selectbox("Select experience:", exp1)

# Map experience to label
experience_map = {
    '<1': 'Newbie mentor',
    '1': '1', '2': '2', '3': '3', '4': '4', '5': '5',
    '5+': 'Professional'
}
experience_label = experience_map[exp]

# --- Initialize Chat Model ---
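# HuggingFaceEndpoint is the raw inference client (model, provider, generation
# settings); ChatHuggingFace wraps it so LangChain chat messages
# (System/Human/AI) can be exchanged with the model.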
deep_seek_skeleton = HuggingFaceEndpoint(
    repo_id='meta-llama/Llama-3.2-3B-Instruct',
    provider='sambanova',
    temperature=0.7,
    max_new_tokens=150,
    task='conversational'
)

# Generation settings (temperature, max_new_tokens) are configured on the
# endpoint above; the chat wrapper only needs the llm itself.
deep_seek = ChatHuggingFace(llm=deep_seek_skeleton)

# --- Session State ---
PAGE_KEY = "python_chat_history"
try:
    # Initialize this page's chat history on first load
    if PAGE_KEY not in st.session_state:
        st.session_state[PAGE_KEY] = []
    st.subheader("🗨️ Chat History")
    for user, bot in st.session_state[PAGE_KEY]:
        st.markdown(f"**You:** {user}")
        st.markdown(f"**Mentor:** {bot}")
        st.markdown("---")
    # --- Chat Form ---
    with st.form(key="chat_form"):
        user_input = st.text_input("Ask your question:")
        submit = st.form_submit_button("Send")
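    # Pressing "Send" triggers a script rerun with `submit` set to True.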
    
    # --- Chat Logic ---
    if submit and user_input:
        # Add system context
        system_prompt = f"""Act as a Python mentor with {experience_label} years of experience. Teach in a friendly, approachable manner while following these strict rules:
        1. Only answer questions related to Python programming (including libraries, frameworks, and tools in the Python ecosystem)
        2. For any non-Python query, respond with exactly: "I specialize only in Python programming. This appears to be a non-Python topic."
        3. Never suggest you can help with non-Python topics
        4. Keep explanations clear, practical, and beginner-friendly when appropriate
        5. Include practical examples when explaining concepts
        6. For advanced topics, assume the student has basic Python knowledge"""    
        # Create message list
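        # Only the system prompt and the latest question are sent each turn;
        # earlier exchanges are shown above but not resent to the model.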
        messages = [SystemMessage(content=system_prompt), HumanMessage(content=user_input)]
    
        # Get model response
        result = deep_seek.invoke(messages)
    
        # Append to history (rendered by the history loop on the next rerun)
        st.session_state[PAGE_KEY].append((user_input, result.content))

except Exception:
    # Assumes failures here are most likely Hugging Face API rate/credit limits.
    st.warning('The token limit has been reached. Please try again in 24 hours!')
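

# ---------------------------------------------------------------------------
# An earlier, commented-out version of this app, kept for reference.
# ---------------------------------------------------------------------------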
# import streamlit as st
# import os
# import langchain
# import langchain_huggingface
# from langchain_huggingface import HuggingFaceEndpoint,HuggingFacePipeline,ChatHuggingFace
# from langchain_core.messages import HumanMessage,AIMessage,SystemMessage

# deep_seek_skeleton = HuggingFaceEndpoint(repo_id='meta-llama/Llama-3.2-3B-Instruct',
#                     provider = 'sambanova',
#                     temperature=0.7,
#                     max_new_tokens=150,
#                     task = 'conversational')
# deep_seek = ChatHuggingFace(llm=deep_seek_skeleton,
#                     repo_id='meta-llama/Llama-3.2-3B-Instruct',
#                     provider = 'sambanova',
#                     temperature=0.7,
#                     max_new_tokens=150,
#                     task = 'conversational')

# exp1 = ['<1', '1', '2', '3', '4', '5', '5+']
# exp = st.selectbox("Select experience:", exp1)
# if exp == '<1':
#     experince = 'Newbie mentor'
# elif exp == '1':
#     experince = '1'
# elif exp == '2':
#     experince = '2'
# elif exp == '3':
#     experince = '3'
# elif exp == '4':
#     experince = '4'
# elif exp == '5':
#     experince = '5'
# elif exp == '5+':
#     experince = 'professional'




# selec = ['Python', 'Machine Learning', 'Deep Learning', 'Statistics', 'SQL', 'Excel']
# sub = st.selectbox("Select subject:", selec)





    
# user_input = st.text_input("Enter your query:")
# l = []
# st.write(l)
# message = [SystemMessage(content=f'Act as {sub} mentor who has {experince} years of experience and the one who teaches in very friendly manner and also he explains everything within 150 words'),
#                    HumanMessage(content=user_input)]
# while user_input!='end':
#     l.append(user_input)
#     l.append(result.content)
#     st.write(l)
#     user_input = st.text_input("Enter your query:")
#     message = [SystemMessage(content=f'Act as {sub} mentor who has {experince} years of experience and the one who teaches in very friendly manner and also he explains everything within 150 words'),
#                    HumanMessage(content=user_input)]
#     result = deep_seek.invoke(message)