import os
from typing import TypedDict, Annotated, List, Union

import requests
import gradio as gr
from langchain_core.messages import HumanMessage, AIMessage
from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq


class PlannerState(TypedDict):
    messages: Annotated[List[Union[HumanMessage, AIMessage]], "The messages in the conversation"]
    city: str
    interests: List[str]
    itinerary: str


# Initialize the LLM (temperature 0.7 for a more creative tone).
# The API key is read from the environment rather than hardcoded in source.
llm = ChatGroq(
    temperature=0.7,
    groq_api_key=os.getenv("GROQ_API_KEY"),
    model_name="llama-3.3-70b-versatile",
)

# Prompt for generating an itinerary
itinerary_prompt = ChatPromptTemplate.from_messages([
    ("system",
     "You are a smart travel agent who creates engaging, fun, and optimized day trip itineraries for {city}. "
     "Tailor recommendations based on the user's interests: {interests}. "
     "Include hidden gems, famous spots, and local cuisine. Keep it structured, with timestamps."),
    ("human", "Plan my perfect day trip!"),
])

# Prompt for generating a fun fact
fun_fact_prompt = ChatPromptTemplate.from_messages([
    ("system",
     "You are a knowledgeable travel guide. Share an interesting and unique fun fact about {city} "
     "that most travelers don't know."),
    ("human", "Tell me a fun fact about {city}!"),
])


# Collect the city input and record it in the conversation history
def input_city(city: str, state: PlannerState) -> PlannerState:
    return {
        **state,
        "city": city,
        "messages": state["messages"] + [HumanMessage(content=f"City: {city}")],
    }


# Collect the interests input, splitting the comma-separated string into a list
def input_interests(interests: str, state: PlannerState) -> PlannerState:
    interest_list = [interest.strip() for interest in interests.split(",")]
    return {
        **state,
        "interests": interest_list,
        "messages": state["messages"] + [HumanMessage(content=f"Interests: {', '.join(interest_list)}")],
    }


# Generate the day trip itinerary from the collected city and interests
def create_itinerary(state: PlannerState) -> str:
    response = llm.invoke(
        itinerary_prompt.format_messages(
            city=state["city"],
            interests=", ".join(state["interests"]),
        )
    )
    return response.content


# Fetch real-time weather data from WeatherAPI (key read from the environment)
def get_weather(city: str) -> str:
    api_key = os.getenv("WEATHERAPI_KEY")
    url = f"http://api.weatherapi.com/v1/current.json?key={api_key}&q={city}&aqi=no"
    try:
        response = requests.get(url, timeout=10)
        data = response.json()
        if "current" in data:
            weather_desc = data["current"]["condition"]["text"]
            temp = data["current"]["temp_c"]
            return f"Current weather in {city}: {weather_desc}, {temp}°C"
        return "Weather data unavailable."
    except (requests.RequestException, ValueError):
        return "Error fetching weather."
# Generate a fun fact about the city
def fun_fact(city: str) -> str:
    response = llm.invoke(fun_fact_prompt.format_messages(city=city))
    return response.content


# Gradio callback: build the full travel plan for a given city and set of interests
def travel_planner(city: str, interests: str):
    # Initialize state
    state: PlannerState = {
        "messages": [],
        "city": "",
        "interests": [],
        "itinerary": "",
    }

    # Update state with user inputs
    state = input_city(city, state)
    state = input_interests(interests, state)

    # Generate itinerary, weather, and fun fact
    itinerary = create_itinerary(state)
    weather = get_weather(city)
    fact = fun_fact(city)

    return itinerary, weather, fact


# Build the Gradio interface
interface = gr.Interface(
    fn=travel_planner,
    inputs=[
        gr.Textbox(label="Enter the city for your trip", placeholder="e.g., Paris, Tokyo, New York"),
        gr.Textbox(label="Enter your interests (comma-separated)", placeholder="e.g., history, food, adventure"),
    ],
    outputs=[
        gr.Textbox(label="Generated Itinerary", interactive=False),
        gr.Textbox(label="Current Weather", interactive=False),
        gr.Textbox(label="Fun Fact", interactive=False),
    ],
    theme="soft",
    title="✈️ AI Travel Planner",
    description="Plan your next adventure! Get a custom itinerary, the current weather, and a fun fact!",
    live=True,
)

# Launch the Gradio application
if __name__ == "__main__":
    interface.launch()