Simple Chat
Basic example:
```python
import asyncio

from langchat import LangChat
from langchat.llm import OpenAI
from langchat.vector_db import Pinecone
from langchat.database import Supabase


async def main():
    llm = OpenAI(api_key="sk-...", model="gpt-4o-mini")
    vector_db = Pinecone(api_key="...", index_name="...")
    db = Supabase(url="https://...", key="...")

    ai = LangChat(llm=llm, vector_db=vector_db, db=db)

    result = await ai.chat(
        query="Hello! What can you help me with?",
        user_id="user123"
    )
    print(result["response"])

asyncio.run(main())
```
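
The examples on this page hard-code placeholder credentials. In practice you would typically load them from the environment instead. A minimal sketch, assuming hypothetical environment variable names (adjust to whatever your deployment defines):

```python
import os

from langchat.llm import OpenAI
from langchat.vector_db import Pinecone
from langchat.database import Supabase

# Hypothetical variable names; only the constructor parameters shown in the
# examples above (api_key, model, index_name, url, key) are assumed here.
llm = OpenAI(api_key=os.environ["OPENAI_API_KEY"], model="gpt-4o-mini")
vector_db = Pinecone(api_key=os.environ["PINECONE_API_KEY"], index_name=os.environ["PINECONE_INDEX"])
db = Supabase(url=os.environ["SUPABASE_URL"], key=os.environ["SUPABASE_KEY"])
```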
Conversation
LangChat remembers previous messages:
```python
import asyncio

from langchat import LangChat
from langchat.llm import OpenAI
from langchat.vector_db import Pinecone
from langchat.database import Supabase


async def main():
    llm = OpenAI(api_key="sk-...", model="gpt-4o-mini")
    vector_db = Pinecone(api_key="...", index_name="...")
    db = Supabase(url="https://...", key="...")

    ai = LangChat(llm=llm, vector_db=vector_db, db=db)
    user_id = "user123"

    # First message
    result1 = await ai.chat(
        query="What universities offer computer science?",
        user_id=user_id
    )

    # Second message (remembers context)
    result2 = await ai.chat(
        query="What about in Europe?",
        user_id=user_id
    )

    # Third message (continues conversation)
    result3 = await ai.chat(
        query="Which accept IELTS 6.5?",
        user_id=user_id
    )

asyncio.run(main())
```
LangChat automatically maintains conversation history for each user. No manual memory management needed!
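
Because history is keyed by `user_id`, each user gets an independent conversation. A short sketch illustrating the isolation (the user IDs and the `demo_isolation` helper are arbitrary examples, not part of the library):

```python
async def demo_isolation(ai):
    # Alice's messages share one conversation history.
    await ai.chat(query="What universities offer computer science?", user_id="alice")
    await ai.chat(query="What about in Europe?", user_id="alice")  # follows Alice's earlier question

    # Bob's message starts a fresh conversation; Alice's context is not shared with him.
    result = await ai.chat(query="What about in Europe?", user_id="bob")
    print(result["response"])
```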
Custom Prompts
Control how your chatbot responds:
```python
import asyncio

from langchat import LangChat
from langchat.llm import OpenAI
from langchat.vector_db import Pinecone
from langchat.database import Supabase

# Custom system prompt
custom_prompt = """You are a helpful assistant.
Answer questions clearly and concisely.
Always be friendly and professional."""


async def main():
    llm = OpenAI(api_key="sk-...", model="gpt-4o-mini")
    vector_db = Pinecone(api_key="...", index_name="...")
    db = Supabase(url="https://...", key="...")

    ai = LangChat(
        llm=llm,
        vector_db=vector_db,
        db=db,
        prompt_template=custom_prompt
    )

    result = await ai.chat(
        query="What is Python?",
        user_id="user123"
    )
    print(result["response"])

asyncio.run(main())
```
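
The prompt template is a plain string, so it can be as specific as your use case requires. For example, a prompt tailored to the study-abroad questions used elsewhere on this page (the wording is only an illustration; it reuses the `llm`, `vector_db`, and `db` objects constructed above):

```python
# Illustrative domain-specific prompt; adjust the instructions to your own use case.
advisor_prompt = """You are a study-abroad advisor.
Answer questions about universities, admission requirements, and language tests.
If you are not sure about a requirement, say so instead of guessing."""

ai = LangChat(
    llm=llm,
    vector_db=vector_db,
    db=db,
    prompt_template=advisor_prompt
)
```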
Error Handling
Handle errors gracefully:
```python
import asyncio

from langchat import LangChat
from langchat.llm import OpenAI
from langchat.vector_db import Pinecone
from langchat.database import Supabase


async def main():
    try:
        llm = OpenAI(api_key="sk-...", model="gpt-4o-mini")
        vector_db = Pinecone(api_key="...", index_name="...")
        db = Supabase(url="https://...", key="...")

        ai = LangChat(llm=llm, vector_db=vector_db, db=db)

        result = await ai.chat(
            query="Hello!",
            user_id="user123"
        )

        if result["status"] == "success":
            print(result["response"])
        else:
            print(f"Error: {result.get('error')}")
    except Exception as e:
        print(f"Error: {e}")

asyncio.run(main())
```
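
For transient failures such as network errors or rate limits, you may want to retry before giving up. A minimal sketch of a retry wrapper, assuming `ai.chat` either raises an exception or returns a non-success status when a call fails (the `chat_with_retry` helper is not part of LangChat):

```python
import asyncio


async def chat_with_retry(ai, query, user_id, attempts=3, delay=1.0):
    """Retry ai.chat() a few times with a simple linear backoff."""
    for attempt in range(1, attempts + 1):
        try:
            result = await ai.chat(query=query, user_id=user_id)
            if result.get("status") == "success":
                return result
        except Exception as e:
            print(f"Attempt {attempt} failed: {e}")
        await asyncio.sleep(delay * attempt)  # wait a little longer after each failure
    raise RuntimeError("chat failed after retries")
```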
Next Steps
- Custom Prompts - Advanced prompt customization
- API Server - Run as web API
- Document Indexing - Add your own knowledge base
Built with ❤️ by NeuroBrain
