Quick Setup
Initialize each provider directly, then pass them to LangChat:
from langchat import LangChat
from langchat.llm import OpenAI
from langchat.vector_db import Pinecone
from langchat.database import Supabase
# Setup providers
llm = OpenAI(api_key="sk-...", model="gpt-4o-mini", temperature=0.7)
vector_db = Pinecone(api_key="...", index_name="...")
db = Supabase(url="https://...", key="...")
# Create chatbot
ai = LangChat(llm=llm, vector_db=vector_db, db=db)
LLM Configuration
OpenAI
from langchat.llm import OpenAI
# Basic
llm = OpenAI(api_key="sk-...", model="gpt-4o-mini")
# With options
llm = OpenAI(
api_key="sk-...",
model="gpt-4o-mini",
temperature=0.7,
max_tokens=1000
)
Available Models:
gpt-4o-mini (recommended)
gpt-4o
gpt-4-turbo
gpt-3.5-turbo
Other LLMs
from langchat.llm import Anthropic, Gemini, Ollama
# Anthropic
llm = Anthropic(api_key="...", model="claude-3-5-sonnet-20241022")
# Gemini
llm = Gemini(api_key="...", model="gemini-1.5-flash")
# Ollama (local)
llm = Ollama(model="llama2", base_url="http://localhost:11434")
Vector Database
Pinecone
from langchat.vector_db import Pinecone
vector_db = Pinecone(
api_key="pcsk-...",
index_name="your-index",
embedding_model="text-embedding-3-large" # Optional
)
Make sure your Pinecone index exists before using it.
Database
Supabase
from langchat.database import Supabase
db = Supabase(
url="https://xxxxx.supabase.co",
key="eyJhbGc..."
)
On first run, LangChat automatically creates the database tables it needs.
Advanced Configuration
Custom Prompts
custom_prompt = """You are a helpful assistant.
Use this context: {context}
History: {chat_history}
Question: {question}
Answer:"""
ai = LangChat(
llm=llm,
vector_db=vector_db,
db=db,
prompt_template=custom_prompt
)
Reranker
from langchat.reranker import Flashrank
reranker = Flashrank(
model_name="ms-marco-MiniLM-L-12-v2",
top_n=3
)
ai = LangChat(
llm=llm,
vector_db=vector_db,
db=db,
reranker=reranker
)
Session Settings
ai = LangChat(
llm=llm,
vector_db=vector_db,
db=db,
max_chat_history=50 # Keep the last 50 messages
)
Best Practices
1. Use Environment Variables
import os
llm = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
vector_db = Pinecone(
api_key=os.getenv("PINECONE_API_KEY"),
index_name=os.getenv("PINECONE_INDEX_NAME")
)
2. Multiple API Keys
# OpenAI supports multiple API keys; LangChat rotates between them
llm = OpenAI(api_keys=["key1", "key2", "key3"])
3. Error Handling
try:
ai = LangChat(llm=llm, vector_db=vector_db, db=db)
except ValueError as e:
print(f"Configuration error: {e}")
Next Steps
Built with ❤️ by NeuroBrain