import os
from langchain_community.vectorstores import Chroma
from langchain_openai import OpenAIEmbeddings
class ChromaAdapter:
    """Adapter exposing a Chroma collection as a vector store.

    Documents are embedded with OpenAI's ``text-embedding-3-small`` model,
    and the collection is persisted on local disk.
    """

    def __init__(self, collection_name: str, persist_dir: str = "./chroma_db"):
        """Open (or create) *collection_name* under *persist_dir*."""
        embedder = OpenAIEmbeddings(model="text-embedding-3-small")
        self.embeddings = embedder
        self.store = Chroma(
            collection_name=collection_name,
            embedding_function=embedder,
            persist_directory=persist_dir,
        )

    def get_retriever(self, k: int = 5):
        """Return a retriever over the collection yielding the top *k* matches."""
        return self.store.as_retriever(search_kwargs={"k": k})
# Wire the custom Chroma adapter into a LangChat instance.
from langchat import LangChat
from langchat.providers import OpenAI, Supabase

_llm = OpenAI("gpt-4o-mini")
_vector_db = ChromaAdapter("my-collection")

lc = LangChat(
    llm=_llm,
    vector_db=_vector_db,
    db=Supabase(),
)