import google.generativeai as genai
import os
import json
import time
from dotenv import load_dotenv
load_dotenv()
api_key = os.getenv("GOOGLE_API_KEY")
if not api_key:
    raise ValueError("GOOGLE_API_KEY environment variable is not set. Please add it to your .env file")

# The check above guarantees a key is present, so only a truncated preview is printed.
print(f"Google API Key loaded: {api_key[:10]}...")
genai.configure(api_key=api_key)


def query_gemini(questions, contexts):
    """Answer each question using only the supplied context chunks via Gemini.

    Returns a dict of the form {"answers": [...]} with one answer per question,
    or error placeholders if the API call or JSON parsing fails.
    """
    start_time = time.time()
    print(f"Starting LLM processing for {len(questions)} questions with {len(contexts)} context chunks")
    try:
        # Time context preparation
        context_start = time.time()
        context = "\n\n".join(contexts)
        context_time = time.time() - context_start
        print(f"Context preparation took: {context_time:.2f} seconds")
        print(f"Total context length: {len(context)} characters")

        # Time prompt preparation
        prompt_start = time.time()
        # Create a numbered list of questions
        questions_text = "\n".join([f"{i+1}. {q}" for i, q in enumerate(questions)])
        prompt = f"""
You are a skilled insurance policy assistant. Based only on the provided context, answer each question clearly and briefly.
🎯 GOAL:
- Each answer should be precise, informative, and phrased naturally - around 1-2 full sentences.
- Avoid long clauses or repeating policy language verbatim.
- Focus on **clarity**, **brevity**, and **accuracy** - your answers will be judged for quality and relevance.
📘 INSTRUCTIONS:
1. Only use the context below. If the answer is not present, respond with exactly: "Not Found".
2. Answers must be helpful and compact - **complete, but not wordy**. Avoid long multi-clause sentences.
3. Do NOT include section references (like "as per section 3.1.6") or legal codes.
4. Avoid copying large phrases from the document. Rephrase naturally using simpler language where appropriate.
5. Follow the tone and length of these examples:
- "A grace period of thirty days is provided for premium payment after the due date."
- "Yes, maternity expenses are covered after 24 months of continuous coverage, limited to two deliveries or terminations."
- "Yes, the policy covers organ donor expenses for harvesting the organ if donated to the insured person."
- "Not Found"
📤 RETURN FORMAT:
Respond in the exact JSON format below, with no extra text or explanations:
{{
  "answers": [
    "Answer to question 1",
    "Answer to question 2",
    ...
  ]
}}
📚 CONTEXT:
{context}
❓ QUESTIONS:
{questions_text}
Your task: Answer each question concisely and professionally. Use plain phrasing, stay within 1-2 clear sentences, and avoid unnecessary detail or repetition.
"""
        prompt_time = time.time() - prompt_start
        print(f"Prompt preparation took: {prompt_time:.2f} seconds")
        print(f"Total prompt length: {len(prompt)} characters")

        # Time model initialization and API call
        api_start = time.time()
        model = genai.GenerativeModel('gemini-2.0-flash-exp')
        response = model.generate_content(prompt)
        api_time = time.time() - api_start
        print(f"Gemini API call took: {api_time:.2f} seconds")

        # Time response processing
        process_start = time.time()
        response_text = response.text.strip()
        print(f"Raw response length: {len(response_text)} characters")

        # Try to parse the response as JSON
        try:
            # Remove any markdown code blocks if present
            if response_text.startswith("```json"):
                response_text = response_text.replace("```json", "").replace("```", "").strip()
            elif response_text.startswith("```"):
                response_text = response_text.replace("```", "").strip()
            parsed_response = json.loads(response_text)
            process_time = time.time() - process_start
            print(f"Response processing took: {process_time:.2f} seconds")
            total_time = time.time() - start_time
            print(f"Total LLM processing took: {total_time:.2f} seconds")
            return parsed_response
        except json.JSONDecodeError:
            # If JSON parsing fails, return a structured fallback response
            process_time = time.time() - process_start
            print(f"Response processing took: {process_time:.2f} seconds (JSON parsing failed)")
            print(f"Failed to parse JSON response: {response_text}")
            total_time = time.time() - start_time
            print(f"Total LLM processing took: {total_time:.2f} seconds")
            return {"answers": ["Error parsing response"] * len(questions)}
    except Exception as e:
        total_time = time.time() - start_time
        print(f"Error in query_gemini after {total_time:.2f} seconds: {str(e)}")
        return {"answers": [f"Error generating response: {str(e)}"] * len(questions)}