jenngang committed (verified)
Commit e4eb061 · 1 parent: 5e3d120

Upload app.py with huggingface_hub

Files changed (1): app.py (+12 −22)
app.py CHANGED
@@ -29,9 +29,10 @@ from langchain.agents import create_tool_calling_agent, AgentExecutor
 from langchain_core.prompts import ChatPromptTemplate
 
 # LangChain OpenAI imports
+# Commented cout - not used
 #from langchain_openai import AzureOpenAIEmbeddings, AzureChatOpenAI # OpenAI embeddings and models
 #from langchain.embeddings.openai import OpenAIEmbeddings # OpenAI embeddings for text vectors
-# Added
+# Added and used below
 from langchain_openai import OpenAIEmbeddings, ChatOpenAI
 #from langchain_openai import ChatOpenAI
 
@@ -58,15 +59,10 @@ from datetime import datetime
 #====================================SETUP=====================================#
 # Fetch secrets from Hugging Face Spaces
 api_key = os.environ["API_KEY"]
-#print(f"api_key::{api_key}")
 endpoint = os.environ["OPENAI_API_BASE"]
-#print(f"endpoint::{endpoint}" )
 groq_api_key = os.environ['LLAMA_API_KEY'] # llama_api_key = os.environ['GROQ_API_KEY']
-#print(f"groq_api_key::{groq_api_key}")
 MEM0_api_key = os.environ['MEM0_API_KEY'] # MEM0_api_key = os.environ['mem0']
-#print(f"MEM0_api_key::{MEM0_api_key}")
 my_api_key = os.environ["MY_API_KEY"]
-#print(f"my_api_key::{my_api_key}")
 
 # Initialize the OpenAI embedding function for Chroma
 embedding_function = chromadb.utils.embedding_functions.OpenAIEmbeddingFunction(
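
A minimal sketch, not part of app.py, of reading the same Hugging Face Spaces secrets with an explicit check so that a missing secret fails with a clear error; the require_secret helper is hypothetical, while the secret names are the ones used above.

import os

def require_secret(name: str) -> str:
    # Hypothetical helper: read a required Hugging Face Spaces secret or fail loudly.
    value = os.environ.get(name)
    if not value:
        raise RuntimeError(f"Missing required secret: {name}")
    return value

# Same secret names as the setup block above.
api_key = require_secret("API_KEY")
endpoint = require_secret("OPENAI_API_BASE")
groq_api_key = require_secret("LLAMA_API_KEY")
MEM0_api_key = require_secret("MEM0_API_KEY")
my_api_key = require_secret("MY_API_KEY")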
@@ -74,7 +70,6 @@ embedding_function = chromadb.utils.embedding_functions.OpenAIEmbeddingFunction(
     api_key=api_key, # Complete the code to define the API key
     model_name='text-embedding-ada-002' # This is a fixed value and does not need modification
 )
-print("embedding_function initialized...")
 
 # This initializes the OpenAI embedding function for the Chroma vectorstore, using the provided endpoint and API key.
 
@@ -84,7 +79,6 @@ embedding_model = OpenAIEmbeddings(
     openai_api_key=api_key,
     model='text-embedding-ada-002'
 )
-print("embedding_model initialized...")
 
 # Initialize the Chat OpenAI model
 llm = ChatOpenAI(
@@ -93,7 +87,6 @@ llm = ChatOpenAI(
     model="gpt-4o", # used gpt4o instead of gpt-4o-mini to get improved results
     streaming=False
 )
-print("llm initialized...")
 
 # This initializes the Chat OpenAI model with the provided endpoint, API key, deployment name, and a temperature setting of 0 (to control response variability).
 
@@ -161,7 +154,6 @@ vector_store = Chroma(
     persist_directory="./nutritional_db",
     embedding_function=embedding_model
 )
-print("vector_store initialized...")
 
 # Create a retriever from the vector store
 retriever = vector_store.as_retriever(
@@ -180,7 +172,8 @@ def retrieve_context(state):
         Dict: The updated state with the retrieved context.
     """
     print("---------retrieve_context---------")
-    query = state['query'] # Complete the code to define the key for the expanded query
+    # Add original query to the state to improve result.
+    query = f"{state['query']}; {state['expanded_query']}" # Complete the code to define the key for the expanded query
     #print("Query used for retrieval:", query) # Debugging: Print the query
 
     # Retrieve documents from the vector store
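
A minimal sketch, outside the diff, of the combined-query retrieval introduced in this hunk: the original query and the expanded query are joined and sent to the Chroma store built above. The state keys match the code; the use of similarity_search, the k value, and the 'context' key are assumptions for illustration.

def retrieve_context(state: dict) -> dict:
    # Join the original and expanded queries, as in the change above.
    query = f"{state['query']}; {state['expanded_query']}"

    # Illustrative retrieval call against the Chroma vector store; k=3 is an assumed value.
    docs = vector_store.similarity_search(query, k=3)

    # Store the retrieved text on the state for downstream nodes ('context' key is assumed).
    state['context'] = [doc.page_content for doc in docs]
    return state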
@@ -575,17 +568,13 @@ class NutritionBot:
         # Initialize a memory client to store and retrieve customer interactions
         self.memory = MemoryClient(api_key=MEM0_api_key) # userdata.get("mem0")) # Complete the code to define the memory client API key
 
-        #st.write(f"my_api_key:{my_api_key}")
-        api_key = os.environ["API_KEY"]
-        st.write(f"api_key:{api_key}")
-        '''
-        model="gpt-4o",
-        temperature=0.0,
-        streaming=False, # Explicitly disabling streaming
-        api_key=my_api_key,
-        openai_api_key=my_api_key,
-        #base_url=endpoint,
-        #openai_api_base=endpoint # api_key
+        self.client = ChatOpenAI(
+            model_name="gpt-4o-mini", # Specify the model to use (e.g., GPT-4 optimized version)
+            api_key = api_key, # config.get("API_KEY"), # API key for authentication
+            endpoint = endpoint, # config.get("OPENAI_API_BASE"),
+            temperature=0 # Controls randomness in responses; 0 ensures deterministic results
+        )
+
         '''
         try:
             self.client = ChatOpenAI(
@@ -600,6 +589,7 @@ class NutritionBot:
         except Exception as e:
             print(f"Error with OpenAI: {str(e)}")
             return None
+        '''
 
         # Define tools available to the chatbot, such as web search
         tools = [agentic_rag]
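
For reference, a minimal sketch of constructing a chat client against a custom OpenAI-compatible endpoint with langchain_openai.ChatOpenAI, which accepts base_url (alias openai_api_base) for a non-default endpoint; the secret names come from the setup section and the model choice is illustrative.

from langchain_openai import ChatOpenAI

chat_client = ChatOpenAI(
    model="gpt-4o-mini",   # illustrative model choice
    api_key=api_key,       # API_KEY secret from the setup section
    base_url=endpoint,     # OPENAI_API_BASE secret; ChatOpenAI's parameter for a custom endpoint
    temperature=0,         # deterministic responses
    streaming=False,
)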
 