import streamlit as st
import requests
from fpdf import FPDF
import os
from dotenv import load_dotenv
from datetime import datetime
import zipfile
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory
from langchain.llms import HuggingFacePipeline
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
# Extract the .streamlit folder if it was shipped as a zip archive
if not os.path.exists(".streamlit"):
    with zipfile.ZipFile(".streamlit.zip", "r") as zip_ref:
        zip_ref.extractall(".")
# Load .env variables
load_dotenv()
HF_API_TOKEN = os.getenv("HF_API_TOKEN")
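# Note: HF_API_TOKEN is not used anywhere below. If the model repo were gated,
# it could (assuming a recent transformers version) be passed to the
# from_pretrained() calls as token=HF_API_TOKEN.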
# Initialize LangChain LLM
model_id = "TheBloke/Mistral-7B-Instruct-v0.1-GPTQ"
# Load tokenizer and model locally
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")
# Create Hugging Face pipeline
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=2048, temperature=0.7)
# LangChain wrapper
llm = HuggingFacePipeline(pipeline=pipe)
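# Note: loading a GPTQ-quantized checkpoint like this one generally assumes the
# auto-gptq (or optimum) package is installed and a CUDA-capable GPU is available.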
# Set up LangChain memory and conversation chain
if "memory" not in st.session_state:
    st.session_state.memory = ConversationBufferMemory()
if "conversation" not in st.session_state:
    st.session_state.conversation = ConversationChain(
        llm=llm,
        memory=st.session_state.memory,
        verbose=False,
    )
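# Keeping the memory and chain in st.session_state means Streamlit reruns
# (triggered by every widget interaction) reuse the same conversation history
# instead of rebuilding it from scratch.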
# PDF generation function
def save_trip_plan_as_pdf(text, filename="trip_plan.pdf"):
    pdf = FPDF()
    pdf.add_page()
    pdf.set_font("Arial", size=12)
    # FPDF's core fonts only cover Latin-1, so drop any unsupported characters
    safe_text = text.encode("latin-1", "ignore").decode("latin-1")
    pdf.multi_cell(0, 10, safe_text)
    pdf.output(filename)
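# Example usage (hypothetical text, assumes the working directory is writable):
# save_trip_plan_as_pdf("Day 1: Arrive in Manali...", filename="sample_plan.pdf")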
# Streamlit UI
st.set_page_config(page_title="Intelligent Travel Planner", layout="wide")
st.title("Intelligent Travel Planner Agent")
# User inputs
col1, col2 = st.columns(2)
with col1:
    from_location = st.text_input("Your Current Location", placeholder="e.g., Mumbai")
    destination = st.text_input("Destination", placeholder="e.g., Manali")
    start_date = st.date_input("Start Date")
with col2:
    end_date = st.date_input("End Date")
    budget = st.text_input("Budget (in INR)", placeholder="e.g., 5000")
    preferences = st.text_input("Preferences", placeholder="e.g., Adventure, Culture, Beaches")
generate_button = st.button("Generate Trip Plan")
# Generate and display the AI trip plan
if generate_button:
    if from_location and destination and budget and preferences and start_date and end_date:
        user_prompt = (
            f"Create a detailed, day-wise travel itinerary for a trip from {from_location} to {destination}. "
            f"The trip should start on {start_date.strftime('%B %d, %Y')} and end on {end_date.strftime('%B %d, %Y')}, "
            f"with a total budget of ₹{budget} INR. The traveller prefers a trip with the theme: {preferences.lower()}.\n"
            f"Provide a **day-wise breakdown** of the trip including:\n"
            f"- Top tourist attractions\n"
            f"- Recommended local food\n"
            f"- Suggested places to visit (such as markets, museums, temples, etc.)\n"
            f"End with final tips for the trip."
        )
        with st.spinner("Generating trip itinerary..."):
            ai_response = st.session_state.conversation.run(user_prompt)
        st.subheader("Your AI-Generated Trip Itinerary")
        st.write(ai_response)
        # Save and offer the PDF under the same filename the helper writes to
        save_trip_plan_as_pdf(ai_response)
        with open("trip_plan.pdf", "rb") as f:
            st.download_button("Download as PDF", f, file_name="trip_plan.pdf")
    else:
        st.warning("Please fill out all fields above.")
# Follow-up chat section
st.markdown("---")
st.subheader("Ask Follow-Up Questions")
follow_up = st.text_input("Ask a follow-up about your trip plan")
if follow_up:
    with st.spinner("Thinking..."):
        # The conversation memory already contains the original itinerary request,
        # so only the follow-up question needs to be sent here (user_prompt is not
        # defined on reruns where the Generate button was not clicked).
        response = st.session_state.conversation.run(follow_up)
    st.write("Sure, here is your updated travel itinerary:\n" + response)