Spaces:
Sleeping
Sleeping
File size: 3,422 Bytes
2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 f19a654 2ba71b5 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 |
from langchain_core.prompts import ChatPromptTemplate, PromptTemplate
from langchain_groq import ChatGroq
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.schema.output_parser import StrOutputParser
from dotenv import load_dotenv
from huggingface_hub import login
import os
# Load .env so the provider API keys referenced below are present in os.environ.
load_dotenv()
# Authenticate this process with the HuggingFace Hub up front; raises KeyError
# if HUGGING_FACE_API_KEY is missing from the environment/.env file.
login(token=os.environ["HUGGING_FACE_API_KEY"])
# NOTE(review): an empty CURL_CA_BUNDLE disables TLS certificate verification
# for requests-based HTTP clients — confirm this workaround is intentional and
# acceptable for this deployment.
os.environ['CURL_CA_BUNDLE'] = '' # Optional workaround for certificate issues
class Bot():
    """Route a chat prompt to the backend (Groq, HuggingFace, or Google)
    that hosts the requested model.

    Every provider call is wrapped so failures come back as an error string
    rather than a raised exception — callers always receive text.
    """

    def __init__(self):
        # Model identifiers grouped by hosting provider.
        self.groq_models = ['gemma-7b-it', 'llama3-70b-8192',
                            'llama3-8b-8192', 'mixtral-8x22b']
        self.hf_models = ["01-ai/Yi-1.5-34B-Chat", "google/gemma-1.1-2b-it",
                          "google/gemma-1.1-7b-it"]
        self.google_models = ["gemini-1.0-pro", "gemini-1.5-flash",
                              "gemini-1.5-pro"]
        # Combined pool exposed to callers: Google first, then HF, then Groq.
        self.models = self.google_models + self.hf_models + self.groq_models

    def call_groq(self, model, temp=0.7, given_prompt="Hi"):
        """Send *given_prompt* to a Groq-hosted chat model and return the
        reply text, or an "Error (Groq): ..." string on any failure."""
        try:
            chat_template = ChatPromptTemplate.from_messages([
                ("system", "You are a helpful assistant."),
                ("human", "{text}"),
            ])
            groq_llm = ChatGroq(
                model=model,
                groq_api_key=os.environ["GROQ_API_KEY"],
                temperature=temp,
            )
            pipeline = chat_template | groq_llm | StrOutputParser()
            return pipeline.invoke({"text": given_prompt})
        except Exception as e:
            return f"Error (Groq): {str(e)}"

    def call_hf(self, model, temp=0.7, given_prompt="Hi"):
        """Send *given_prompt* to a HuggingFace endpoint model and return the
        reply text, or an "Error (HF): ..." string on any failure."""
        try:
            hf_template = PromptTemplate(
                template="""
You are a helpful assistant
User: {query}
Answer:
""",
                input_variables=["query"],
            )
            endpoint = HuggingFaceEndpoint(repo_id=model, temperature=temp)
            hf_chat = ChatHuggingFace(llm=endpoint, verbose=True)
            pipeline = hf_template | hf_chat | StrOutputParser()
            return pipeline.invoke({"query": given_prompt})
        except Exception as e:
            return f"Error (HF): {str(e)}"

    def call_google(self, model, temp=0.7, given_prompt="Hi"):
        """Send *given_prompt* to a Google Generative AI model and return the
        reply text, or an "Error (Google): ..." string on any failure."""
        try:
            google_template = ChatPromptTemplate.from_messages([
                ("human", "{text}"),
            ])
            gemini = ChatGoogleGenerativeAI(
                model=model,
                google_api_key=os.environ["GOOGLE_API_KEY"],
                temperature=temp,
            )
            pipeline = google_template | gemini | StrOutputParser()
            return pipeline.invoke({"text": given_prompt})
        except Exception as e:
            return f"Error (Google): {str(e)}"

    def response(self, model, prompt="Hi", temperature=0.7):
        """Dispatch *prompt* to whichever provider hosts *model*.

        Returns the model's reply text (or the provider-specific error
        string), or a fallback message when *model* is not in any pool.
        """
        dispatch = (
            (self.groq_models, self.call_groq),
            (self.hf_models, self.call_hf),
            (self.google_models, self.call_google),
        )
        for pool, handler in dispatch:
            if model in pool:
                return handler(model=model, temp=temperature, given_prompt=prompt)
        return "Sorry! App not working properly – unknown model"
|