Skip to main content

Class: LangChat

The main entry point for the LangChat SDK. Wraps LangChatEngine and returns typed ChatResponse objects.
from langchat import LangChat
from langchat.providers import OpenAI, Pinecone, Supabase

Constructor

LangChat(
    *,
    llm,
    vector_db,
    db,
    reranker=None,
    prompt_template=None,
    standalone_question_prompt=None,
    verbose=False,
    max_chat_history=20,
)
All parameters are keyword-only (note the *).
llm
Any
required
LLM provider instance. From langchat.providers: OpenAI, Anthropic, Gemini, Mistral, Cohere, Ollama.
vector_db
Any
required
Vector database provider. From langchat.providers: Pinecone.
db
Any
required
History database provider. From langchat.providers: Supabase.
reranker
Any | None
default:"None"
Reranker instance. When left as None, a default FlashrankRerankAdapter(model_name="ms-marco-MiniLM-L-12-v2", top_n=3) is used.
prompt_template
str | None
default:"None"
Custom system prompt. Must include {context}, {chat_history}, {question}. See Prompts guide.
standalone_question_prompt
str | None
default:"None"
Custom standalone question prompt. Must include {chat_history} and {question}.
verbose
bool
default:"False"
Enable verbose LangChain logging.
max_chat_history
int
default:"20"
Number of recent exchanges to include in each prompt. One exchange = one (user, AI) pair.
Example:
lc = LangChat(
    llm=OpenAI("gpt-4o-mini"),
    vector_db=Pinecone("my-index"),
    db=Supabase(),
    max_chat_history=10,
)

Methods

chat()

Send a message and receive a typed response. Async.
async def chat(
    self,
    query: str,
    user_id: str,
    platform: str = "default",
) -> ChatResponse
query
str
required
The user’s message.
user_id
str
required
Unique identifier for the user. Scopes conversation history.
platform
str
default:"default"
Namespace for the conversation. Use different values to separate conversations for the same user across multiple apps.
Returns: ChatResponse. Example:
response = await lc.chat(
    query="What is our return policy?",
    user_id="alice",
    platform="web",
)

if response:                                    # True when status == "success"
    print(response.text)
    print(f"{response.response_time:.2f}s")
else:
    print(f"Error: {response.error}")

chat_sync()

Synchronous wrapper around chat(). Blocks until the response is ready.
def chat_sync(
    self,
    query: str,
    user_id: str,
    platform: str = "default",
) -> ChatResponse
Same parameters and return type as chat(). Example:
# Useful in scripts, notebooks, and synchronous frameworks
response = lc.chat_sync(query="Hello!", user_id="alice")
print(response)

index()

Index documents into Pinecone. Can accept a file path, a list of paths, or a directory.
def index(
    self,
    paths: str | list[str],
    *,
    chunk_size: int = 1000,
    chunk_overlap: int = 200,
    namespace: str | None = None,
    prevent_duplicates: bool = True,
) -> dict
paths
str | list[str]
required
Path to a file, a list of file paths, or a directory path.
chunk_size
int
default:"1000"
Maximum characters per chunk.
chunk_overlap
int
default:"200"
Character overlap between adjacent chunks.
namespace
str | None
default:"None"
Pinecone namespace to index into.
prevent_duplicates
bool
default:"True"
Skip chunks that are already in Pinecone (detected by content hash).
Returns: dict with:
  • chunks_indexed (int) — chunks added
  • chunks_skipped (int) — duplicates skipped
  • files_processed (int) — files successfully loaded
  • errors (list) — files that failed to load
Example:
result = lc.index(["docs/faq.pdf", "docs/manual.pdf"], namespace="support")
print(f"Indexed {result['chunks_indexed']} chunks")

get_session()

Get or create a UserSession for a user.
def get_session(
    self,
    user_id: str,
    platform: str = "default",
) -> UserSession
Example:
session = lc.get_session("alice", platform="web")
print(session.chat_history)   # list of (query, response) tuples

load_env() (class method)

Load a .env file into the environment. Call this before creating LangChat.
@classmethod
def load_env(cls, path: str = ".env") -> None
Example:
LangChat.load_env()            # loads .env from current directory
LangChat.load_env(".env.prod") # loads a specific file

Properties

engine
LangChatEngine
Access to the underlying LangChatEngine instance.