Travel Assistant

A complete example of a travel assistant chatbot, combining a custom prompt template with the OpenAI, Pinecone, and Supabase providers in a short multi-turn conversation:
import asyncio
from langchat import LangChat
from langchat.llm import OpenAI
from langchat.vector_db import Pinecone
from langchat.database import Supabase

# Custom travel prompt
TRAVEL_PROMPT = """You are a helpful travel assistant.

Your expertise:
- Destination recommendations
- Flight and hotel booking
- Local attractions
- Travel tips and safety

Be friendly, concise, and helpful.

Context: {context}
History: {chat_history}
Question: {question}

Answer:"""

# Setup providers
llm = OpenAI(api_key="sk-...", model="gpt-4o-mini")
vector_db = Pinecone(api_key="...", index_name="travel-index")
db = Supabase(url="https://...", key="...")

# Create chatbot
ai = LangChat(
    llm=llm,
    vector_db=vector_db,
    db=db,
    prompt_template=TRAVEL_PROMPT
)

# Chat
async def main():
    queries = [
        "What are the best travel destinations in Europe?",
        "What about budget-friendly options?",
        "Which one has the best beaches?"
    ]
    
    for query in queries:
        result = await ai.chat(
            query=query,
            user_id="traveler123",
            domain="travel"
        )
        print(f"Q: {query}")
        print(f"A: {result['response']}\n")

asyncio.run(main())
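
In practice you would not hard-code credentials. A minimal variant of the provider setup that reads them from environment variables instead (the variable names below are conventional choices, not something LangChat requires):

import os

from langchat.llm import OpenAI
from langchat.vector_db import Pinecone
from langchat.database import Supabase

# Same providers as above, with secrets pulled from the environment.
llm = OpenAI(api_key=os.environ["OPENAI_API_KEY"], model="gpt-4o-mini")
vector_db = Pinecone(api_key=os.environ["PINECONE_API_KEY"], index_name="travel-index")
db = Supabase(url=os.environ["SUPABASE_URL"], key=os.environ["SUPABASE_KEY"])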

As an API Server

The same providers can be exposed over HTTP: create_app builds the application and uvicorn serves it:
from langchat.api.app import create_app
from langchat.llm import OpenAI
from langchat.vector_db import Pinecone
from langchat.database import Supabase
import uvicorn

# Setup
llm = OpenAI(api_key="sk-...", model="gpt-4o-mini")
vector_db = Pinecone(api_key="...", index_name="travel-index")
db = Supabase(url="https://...", key="...")

# Create server
app = create_app(
    llm=llm,
    vector_db=vector_db,
    db=db
)

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8000)
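
Once the server is running you can call it over HTTP. The routes are defined by create_app and are not documented on this page, so treat the request below as a sketch only: it assumes a POST /chat endpoint that mirrors LangChat.chat's query, user_id, and domain parameters (if the app is FastAPI-based, its /docs page lists the real routes):

import requests

# Hypothetical request -- the actual route name and payload schema may differ.
response = requests.post(
    "http://localhost:8000/chat",
    json={
        "query": "What are the best travel destinations in Europe?",
        "user_id": "traveler123",
        "domain": "travel",
    },
)
print(response.json())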
Index your travel documents first with load_and_index_documents() so the assistant has relevant context to retrieve.
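
The exact signature of load_and_index_documents() is not shown on this page, so the snippet below is only a sketch: it assumes the method lives on the LangChat instance, is awaitable like chat(), and accepts document paths plus a domain label matching the one used when chatting:

import asyncio

# Assumed usage of load_and_index_documents(); adjust the call to the actual
# signature in your LangChat version. Reuses the `ai` instance from the first
# example, and the file paths here are hypothetical.
async def index_docs():
    await ai.load_and_index_documents(
        file_paths=["docs/europe_guide.pdf", "docs/budget_hotels.md"],
        domain="travel",  # should match the domain passed to chat()
    )

asyncio.run(index_docs())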

Next Steps
