import os
import keyfile
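# 'keyfile' is assumed to be a local module defining GOOGLEKEY (your Google API key);
# keep it out of version control.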
import warnings
import streamlit as st
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.schema import HumanMessage, SystemMessage, AIMessage

# Ignore warnings
warnings.filterwarnings("ignore")

# Streamlit settings
st.set_page_config(page_title="🌿 ArchanaCare πŸ§™β€β™€οΈ", page_icon="πŸ§™β€β™€οΈ", layout="wide")

# Header
st.markdown("<h1 style='text-align: center; color: #4B0082;'>Welcome to ArchanaCare 🌿✨</h1>", unsafe_allow_html=True)
st.markdown("<h3 style='color: #003366;'>How can I assist with your ailments or worries today? πŸ§ͺπŸ’«</h3>", unsafe_allow_html=True)

# Initialize session state for messages
if "sessionMessages" not in st.session_state:
    st.session_state.sessionMessages = [
        SystemMessage(content="You are a medieval magical healer known for your peculiar sarcasm.")
    ]

# Set Google API key
os.environ["GOOGLE_API_KEY"] = keyfile.GOOGLEKEY

# Initialize the model
llm = ChatGoogleGenerativeAI(
    model="gemini-1.5-pro",
    temperature=0.7,
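    # Folds the system message into the first human turn, for Gemini setups
    # without native system-message support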
    convert_system_message_to_human=True
)

# Function to create chat bubbles
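# Note: the message text is injected into raw HTML below; escape it
# (e.g., with html.escape) if inputs may contain markup.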
def chat_bubble(message, is_user=True):
    align = 'right' if is_user else 'left'
    color = '#E1F5FE' if is_user else '#FFEBEE'
    st.markdown(f"""
    <div style="text-align: {align}; padding: 10px;">
        <span style="display: inline-block; padding: 10px; background-color: {color}; color: black;
                     border-radius: 15px; max-width: 70%; word-wrap: break-word;">
            {message}
        </span>
    </div>
    """, unsafe_allow_html=True)

# Function to load answer from the model
def load_answer(question):
    st.session_state.sessionMessages.append(HumanMessage(content=question))
    assistant_answer = llm.invoke(st.session_state.sessionMessages)
    
    if isinstance(assistant_answer, AIMessage):
        st.session_state.sessionMessages.append(assistant_answer)
        return assistant_answer.content
    else:
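        # Fallback: if the wrapper returned a plain string, wrap it in an
        # AIMessage so the chat history stays consistent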
        st.session_state.sessionMessages.append(AIMessage(content=assistant_answer))
        return assistant_answer

# Chat history display
for message in st.session_state.sessionMessages:
    if isinstance(message, HumanMessage):
        chat_bubble(message.content, is_user=True)
    elif isinstance(message, AIMessage):
        chat_bubble(message.content, is_user=False)

# Input area
user_input = st.text_input("You: ", key="input", placeholder="Type your question here...")

# Button for submission
if st.button("🌟 Get a Magical Answer 🌟"):
    if user_input:
        chat_bubble(user_input, is_user=True)  # Display user input
        response = load_answer(user_input)      # Get response
        chat_bubble(response, is_user=False)    # Display AI response

# Background styling
st.markdown("""
<style>
    .stApp {
        background: linear-gradient(to right, #FFEFBA, #FFFFFF);
        color: #4B0082;
        font-family: Arial, sans-serif;
    }
    input[type="text"] {
        padding: 10px;
        border: 2px solid #4B0082;
        border-radius: 15px;
        outline: none;
        width: 100%;
    }
    button {
        background-color: #4B0082;
        color: white;
        border-radius: 15px;
        margin-top: 10px;
    }
</style>
""", unsafe_allow_html=True)