Skip to main content

Overview

LangChat uses direct provider initialization — you construct each provider object yourself and pass it in, so no separate configuration class is needed.

LLM Providers

OpenAI

from langchat.llm import OpenAI

llm = OpenAI(
    api_key="sk-...",
    model="gpt-4o-mini",
    temperature=0.7
)

Other LLMs

from langchat.llm import Anthropic, Gemini, Ollama

# Anthropic
llm = Anthropic(api_key="...", model="claude-3-5-sonnet-20241022")

# Gemini
llm = Gemini(api_key="...", model="gemini-1.5-flash")

# Ollama (local)
llm = Ollama(model="llama2", base_url="http://localhost:11434")

Vector Database

Pinecone

from langchat.vector_db import Pinecone

vector_db = Pinecone(
    api_key="pcsk-...",
    index_name="your-index",
    embedding_model="text-embedding-3-large"  # Optional
)

Database

Supabase

from langchat.database import Supabase

db = Supabase(
    url="https://xxxxx.supabase.co",
    key="eyJhbGc..."
)

Complete Setup

from langchat import LangChat
from langchat.llm import OpenAI
from langchat.vector_db import Pinecone
from langchat.database import Supabase

# Setup providers
llm = OpenAI(api_key="sk-...", model="gpt-4o-mini")
vector_db = Pinecone(api_key="...", index_name="...")
db = Supabase(url="https://...", key="...")

# Create LangChat
ai = LangChat(llm=llm, vector_db=vector_db, db=db)

Next Steps


Built with ❤️ by NeuroBrain