Update app.py

app.py CHANGED
@@ -9,107 +9,110 @@ from langchain.memory import ConversationBufferMemory
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain_community.document_loaders import PyMuPDFLoader

-# Initialize session state FIRST (before any other Streamlit
-…
-st.session_state…
-…
-# Display welcome message if empty
-if not st.session_state.messages:
-    st.session_state.messages.append({
-        "role": "assistant",
-        "content": "Hi! Welcome to Café Eleven. What would you like to order today?"
-    })
-
-# Display chat messages
-for message in st.session_state.messages:
-    with st.chat_message(message["role"]):
-        st.write(message["content"])
-
-# Chat processing functions
-@st.cache_resource
-def setup_chain():
-    # Load all PDFs
-    pdf_files = list(Path(".").glob("*.pdf"))
-    if not pdf_files:
-        st.error("No PDF menus found! Please upload some.")
-        st.stop()

-    # …
-    docs = []
-    for pdf in pdf_files:
-        loader = PyMuPDFLoader(str(pdf))
-        docs.extend(loader.load())

-    # …
-    text_splitter = RecursiveCharacterTextSplitter(
-        …
-    )
-    splits = text_splitter.split_documents(docs)

-    # …
-    …

-    # …
-    …
-
-# Chat input handling
-if prompt := st.chat_input("What would you like to order?"):
-    # Add user message
-    st.session_state.messages.append({"role": "user", "content": prompt})
-    with st.chat_message("user"):
-        st.write(prompt)

-    # …
-    with st.…
-    …

-# …
-…
-uploaded_files = st.file_uploader(
-    "Upload menu PDFs",
-    type="pdf",
-    accept_multiple_files=True
-)
-if uploaded_files:
-    for file in uploaded_files:
-        with open(file.name, "wb") as f:
-            f.write(file.getbuffer())
-    st.success(f"Uploaded {len(uploaded_files)} new menu(s)")
-    st.cache_resource.clear()
+# Initialize session state FIRST (before any other Streamlit operations)
+def initialize_session_state():
+    if 'messages' not in st.session_state:
+        st.session_state.messages = [{
+            "role": "assistant",
+            "content": "Hi! Welcome to Café Eleven. What would you like to order today?"
+        }]

+# Main application function
+def main():
+    initialize_session_state()
+
+    # App configuration
+    st.set_page_config(page_title="Café Eleven", page_icon="☕")
+    st.title("☕ Café Eleven Ordering Assistant")

+    # Display chat messages
+    for message in st.session_state.messages:
+        with st.chat_message(message["role"]):
+            st.write(message["content"])

+    # Chat processing functions
+    @st.cache_resource
+    def setup_chain():
+        pdf_files = list(Path(".").glob("*.pdf"))
+        if not pdf_files:
+            st.error("No PDF menus found! Please upload some.")
+            st.stop()
+
+        # Process all PDFs
+        docs = []
+        for pdf in pdf_files:
+            loader = PyMuPDFLoader(str(pdf))
+            docs.extend(loader.load())
+
+        # Split text
+        text_splitter = RecursiveCharacterTextSplitter(
+            chunk_size=1000,
+            chunk_overlap=200
+        )
+        splits = text_splitter.split_documents(docs)
+
+        # Create embeddings
+        embeddings = HuggingFaceEmbeddings(
+            model_name="sentence-transformers/all-mpnet-base-v2"
+        )
+        vectorstore = FAISS.from_documents(splits, embeddings)
+
+        # Setup LLM
+        llm = HuggingFaceHub(
+            repo_id="meta-llama/Llama-2-7b-chat-hf",
+            huggingfacehub_api_token=os.environ.get("HF_TOKEN", ""),
+            model_kwargs={
+                "temperature": 0.2,
+                "max_length": 256
+            }
+        )
+
+        # Create conversation chain
+        return ConversationalRetrievalChain.from_llm(
+            llm=llm,
+            retriever=vectorstore.as_retriever(),
+            memory=ConversationBufferMemory(
+                memory_key="chat_history",
+                return_messages=True
+            )
+        )

+    # Handle chat input
+    if prompt := st.chat_input("What would you like to order?"):
+        # Add user message
+        st.session_state.messages.append({"role": "user", "content": prompt})
+        with st.chat_message("user"):
+            st.write(prompt)
+
+        # Generate response
+        with st.chat_message("assistant"):
+            with st.spinner("Preparing your order..."):
+                try:
+                    chain = setup_chain()
+                    response = chain({"question": prompt})["answer"]
+                    st.write(response)
+                    st.session_state.messages.append({
+                        "role": "assistant",
+                        "content": response
+                    })
+                except Exception as e:
+                    st.error(f"Error: {str(e)}")

+    # PDF upload in sidebar
+    with st.sidebar:
+        st.header("Menu Management")
+        uploaded_files = st.file_uploader(
+            "Upload menu PDFs",
+            type="pdf",
+            accept_multiple_files=True
+        )
+        if uploaded_files:
+            for file in uploaded_files:
+                with open(file.name, "wb") as f:
+                    f.write(file.getbuffer())
+            st.success(f"Uploaded {len(uploaded_files)} new menu(s)")
+            st.cache_resource.clear()

+# Entry point for Hugging Face Spaces
+if __name__ == "__main__":
+    main()
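
The hunk starts at line 9, so the top of app.py (its import block) is not part of this diff. For orientation only, a minimal sketch of imports that would satisfy the names used in the code above (os, Path, st, FAISS, HuggingFaceEmbeddings, HuggingFaceHub, ConversationalRetrievalChain, ConversationBufferMemory) might look like the following, assuming the langchain / langchain_community package layout; the actual lines 1-8 of the file may differ:

# Hypothetical import block -- the real lines 1-8 of app.py are not shown in this diff.
import os
from pathlib import Path

import streamlit as st
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory  # appears as context in the hunk header
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.llms import HuggingFaceHub
from langchain_community.vectorstores import FAISS

For the HuggingFaceHub call to authenticate, HF_TOKEN would normally be configured as a Space secret so that os.environ.get("HF_TOKEN", "") picks it up at runtime.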