amanmaurya0704 committed
Commit 0e49bbd · 1 Parent(s): 20cac53

added gemini

src/langgraphagenticai/LLMS/__pycache__/geminillm.cpython-312.pyc ADDED
Binary file (1.45 kB)
 
src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc CHANGED
Binary files a/src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc and b/src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc differ
 
src/langgraphagenticai/LLMS/geminillm.py ADDED
@@ -0,0 +1,22 @@
+import os
+import streamlit as st
+from langchain_google_genai import ChatGoogleGenerativeAI
+
+class GeminiLLM:
+    def __init__(self,user_controls_input):
+        self.user_controls_input=user_controls_input
+
+    def get_gemini_llm_model(self):
+        try:
+            gemini_api_key=self.user_controls_input['GEMINI_API_KEY']
+            #print(groq_api_key)
+            selected_gemini_model=self.user_controls_input['selected_gemini_model']
+            if gemini_api_key=='' and os.environ["GEMINI_API_KEY"] =='':
+                st.error("Please Enter the Gemini API KEY")
+            print(selected_gemini_model)
+
+            llm = ChatGoogleGenerativeAI(api_key =gemini_api_key, model=selected_gemini_model)
+
+        except Exception as e:
+            raise ValueError(f"Error Occurred with Exception : {e}")
+        return llm
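Note on the added file: the environment lookup `os.environ["GEMINI_API_KEY"]` raises KeyError when the variable is unset, so an empty UI field plus an unset variable falls through to the except clause instead of the st.error message, and the file carries a leftover `#print(groq_api_key)` comment from the Groq wrapper. A more defensive sketch of the same wrapper (not part of this commit; it reuses the committed names and keyword arguments) could look like:

```python
# Sketch only, not the committed implementation.
import os
import streamlit as st
from langchain_google_genai import ChatGoogleGenerativeAI

class GeminiLLM:
    def __init__(self, user_controls_input):
        self.user_controls_input = user_controls_input

    def get_gemini_llm_model(self):
        try:
            # Prefer the key typed in the UI; fall back to the environment.
            # os.environ.get avoids a KeyError when GEMINI_API_KEY is unset.
            gemini_api_key = (self.user_controls_input.get("GEMINI_API_KEY")
                              or os.environ.get("GEMINI_API_KEY", ""))
            selected_gemini_model = self.user_controls_input.get("selected_gemini_model")
            if not gemini_api_key:
                st.error("Please enter the Gemini API key")
                return None
            return ChatGoogleGenerativeAI(api_key=gemini_api_key,
                                          model=selected_gemini_model)
        except Exception as e:
            raise ValueError(f"Error occurred with exception: {e}")
```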
src/langgraphagenticai/LLMS/groqllm.py CHANGED
@@ -6,12 +6,14 @@ class GroqLLM:
     def __init__(self,user_controls_input):
         self.user_controls_input=user_controls_input
 
-    def get_llm_model(self):
+    def get_groq_llm_model(self):
         try:
             groq_api_key=self.user_controls_input['GROQ_API_KEY']
+            #print(groq_api_key)
             selected_groq_model=self.user_controls_input['selected_groq_model']
             if groq_api_key=='' and os.environ["GROQ_API_KEY"] =='':
                 st.error("Please Enter the Groq API KEY")
+            print(selected_groq_model)
 
             llm = ChatGroq(api_key =groq_api_key, model=selected_groq_model)
 
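Because `get_llm_model` is renamed to `get_groq_llm_model` here, every caller has to change in the same commit (main.py below does). Assuming no other callers exist, a small alternative sketch would keep the old name as an alias so external code does not break; the defensive key handling below is likewise an assumption, not the committed code:

```python
# Sketch only: keep the pre-rename method name as an alias.
import os
import streamlit as st
from langchain_groq import ChatGroq  # same dependency the committed class uses

class GroqLLM:
    def __init__(self, user_controls_input):
        self.user_controls_input = user_controls_input

    def get_groq_llm_model(self):
        groq_api_key = (self.user_controls_input.get("GROQ_API_KEY")
                        or os.environ.get("GROQ_API_KEY", ""))
        selected_groq_model = self.user_controls_input.get("selected_groq_model")
        if not groq_api_key:
            st.error("Please enter the Groq API key")
            return None
        return ChatGroq(api_key=groq_api_key, model=selected_groq_model)

    # Backwards-compatible alias for callers still using the old name.
    get_llm_model = get_groq_llm_model
```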
src/langgraphagenticai/__pycache__/main.cpython-312.pyc CHANGED
Binary files a/src/langgraphagenticai/__pycache__/main.cpython-312.pyc and b/src/langgraphagenticai/__pycache__/main.cpython-312.pyc differ
 
src/langgraphagenticai/main.py CHANGED
@@ -3,6 +3,7 @@ import streamlit as st
 import json
 from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
 from src.langgraphagenticai.LLMS.groqllm import GroqLLM
+from src.langgraphagenticai.LLMS.geminillm import GeminiLLM
 from src.langgraphagenticai.graph.graph_builder import GraphBuilder
 from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
 
@@ -32,8 +33,12 @@ def load_langgraph_agenticai_app():
         if user_message:
             try:
                 # Configure LLM
-                obj_llm_config = GroqLLM(user_controls_input=user_input)
-                model = obj_llm_config.get_llm_model()
+                if user_input.get('selected_llm') == 'Groq':
+                    obj_llm_config = GroqLLM(user_controls_input=user_input)
+                    model = obj_llm_config.get_groq_llm_model()
+                elif user_input.get('selected_llm') == 'Gemini':
+                    obj_llm_config = GeminiLLM(user_controls_input=user_input)
+                    model = obj_llm_config.get_gemini_llm_model()
 
                 if not model:
                     st.error("Error: LLM model could not be initialized.")
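The new if/elif on `selected_llm` is fine for two providers; if more are added later, the same selection can be written as a dispatch table. `PROVIDERS` and `build_model` below are illustrative names, not part of the repository:

```python
# Sketch only: table-driven provider selection instead of an if/elif chain.
from src.langgraphagenticai.LLMS.groqllm import GroqLLM
from src.langgraphagenticai.LLMS.geminillm import GeminiLLM

PROVIDERS = {
    "Groq": (GroqLLM, "get_groq_llm_model"),
    "Gemini": (GeminiLLM, "get_gemini_llm_model"),
}

def build_model(user_input):
    entry = PROVIDERS.get(user_input.get("selected_llm"))
    if entry is None:
        return None  # unknown provider; the existing "if not model" st.error handles it
    llm_cls, factory_name = entry
    obj_llm_config = llm_cls(user_controls_input=user_input)
    return getattr(obj_llm_config, factory_name)()
```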
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc CHANGED
Binary files a/src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc and b/src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc differ
 
src/langgraphagenticai/ui/streamlitui/__pycache__/uiconfigfile.cpython-312.pyc CHANGED
Binary files a/src/langgraphagenticai/ui/streamlitui/__pycache__/uiconfigfile.cpython-312.pyc and b/src/langgraphagenticai/ui/streamlitui/__pycache__/uiconfigfile.cpython-312.pyc differ
 
src/langgraphagenticai/ui/streamlitui/loadui.py CHANGED
@@ -49,6 +49,18 @@ class LoadStreamlitUI:
                 # Validate API key
                 if not self.user_control["GROQ_API_KEY"]:
                     st.warning("⚠️ Please enter your GROQ API key to proceed. Don't have? refer : https://console.groq.com/keys ")
+
+            elif self.user_control["selected_llm"] == 'Gemini':
+                # Model selection
+                model_options = self.config.get_gemini_model_options()
+
+                self.user_control["selected_gemini_model"] = st.selectbox("Select Model", model_options)
+                # API key input
+                self.user_control["GEMINI_API_KEY"] = st.session_state["GEMINI_API_KEY"] = st.text_input("API Key",
+                                                                                                          type="password")
+                # Validate API key
+                if not self.user_control["GEMINI_API_KEY"]:
+                    st.warning("⚠️ Please enter your Gemini API key to proceed. Don't have? refer : https://console.cloud.google.com/apis/credentials")
 
 
             # Use case selection
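The added Gemini branch mirrors the Groq branch almost line for line (model selectbox, password-type key input, warning when empty). If a third provider is ever added, a shared helper could render both; the function below is an illustrative sketch, not code from this commit:

```python
# Sketch only: one helper for the per-provider sidebar controls.
import streamlit as st

def render_provider_controls(user_control, provider, model_options, key_help_url):
    key_name = f"{provider.upper()}_API_KEY"  # e.g. "GEMINI_API_KEY"
    user_control[f"selected_{provider.lower()}_model"] = st.selectbox(
        "Select Model", model_options)
    # Mirror the committed pattern: also store the key in session_state so it
    # survives Streamlit reruns.
    user_control[key_name] = st.session_state[key_name] = st.text_input(
        "API Key", type="password")
    if not user_control[key_name]:
        st.warning(f"⚠️ Please enter your {provider} API key to proceed. "
                   f"Don't have? refer : {key_help_url}")
```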
src/langgraphagenticai/ui/streamlitui/uiconfigfile.ini CHANGED
@@ -1,6 +1,6 @@
 [DEFAULT]
 PAGE_TITLE = Langgraph: Build Stateful Agentic AI graph
-LLM_OPTIONS = Groq, Gemini
-USECASE_OPTIONS = Basic Chatbot
-GROQ_MODEL_OPRIONS = mistral-8x7b-32768, llama3-8b-8192, llama3-70b-8192
+LLM_OPTIONS = Groq,Gemini
+USECASE_OPTIONS = Basic Chatbot,Chatbot with Tool
+GROQ_MODEL_OPTIONS = llama3-8b-8192,llama3-70b-8192,gemma2-9b-it,meta-llama/Llama-Guard-4-12B,distil-whisper-large-v3-en,whisper-large-v3
 GEMINI_MODEL_OPTIONS = gemini-1.5-flash
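The spaces after the commas are dropped here because Config splits these options with a bare `split(",")` (see uiconfigfile.py below), so `Groq, Gemini` would yield an option literally named `' Gemini'` and the `== 'Gemini'` comparisons in main.py and loadui.py would never match:

```python
# Why the whitespace matters with a bare split(","):
print("Groq, Gemini".split(","))  # ['Groq', ' Gemini']  -> ' Gemini' != 'Gemini'
print("Groq,Gemini".split(","))   # ['Groq', 'Gemini']
```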
src/langgraphagenticai/ui/streamlitui/uiconfigfile.py CHANGED
@@ -14,6 +14,9 @@ class Config:
     def get_groq_model_options(self):
         return self.config["DEFAULT"].get("GROQ_MODEL_OPTIONS", "").split(",")
 
+    def get_gemini_model_options(self):
+        return self.config["DEFAULT"].get("GEMINI_MODEL_OPTIONS", "").split(",")
+
     def get_page_title(self):
         return self.config["DEFAULT"].get("PAGE_TITLE")
 
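A whitespace-tolerant variant of these getters would make the .ini formatting above less brittle; the shared `_get_options` helper and the config path below are assumptions, not part of this commit:

```python
# Sketch only: strip whitespace around each comma-separated option.
from configparser import ConfigParser

class Config:
    def __init__(self, config_file="./src/langgraphagenticai/ui/streamlitui/uiconfigfile.ini"):
        self.config = ConfigParser()
        self.config.read(config_file)

    def _get_options(self, key):
        raw = self.config["DEFAULT"].get(key, "")
        return [item.strip() for item in raw.split(",") if item.strip()]

    def get_groq_model_options(self):
        return self._get_options("GROQ_MODEL_OPTIONS")

    def get_gemini_model_options(self):
        return self._get_options("GEMINI_MODEL_OPTIONS")
```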