From 9b307799ce5607ba575fb967921ef2c7af81b82e Mon Sep 17 00:00:00 2001
From: Wendy Liga
Date: Thu, 27 Jul 2023 23:41:53 +0700
Subject: [PATCH] fix missing openai_api_base on langchain-chroma example (#818)

---
 examples/langchain-chroma/query.py | 2 +-
 examples/langchain-chroma/store.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/examples/langchain-chroma/query.py b/examples/langchain-chroma/query.py
index 33848818..61f4c3ea 100644
--- a/examples/langchain-chroma/query.py
+++ b/examples/langchain-chroma/query.py
@@ -9,7 +9,7 @@ from langchain.vectorstores.base import VectorStoreRetriever
 base_path = os.environ.get('OPENAI_API_BASE', 'http://localhost:8080/v1')
 
 # Load and process the text
-embedding = OpenAIEmbeddings()
+embedding = OpenAIEmbeddings(model="text-embedding-ada-002", openai_api_base=base_path)
 
 persist_directory = 'db'
 # Now we can load the persisted database from disk, and use it as normal.
diff --git a/examples/langchain-chroma/store.py b/examples/langchain-chroma/store.py
index b9cbad0e..a52cfe04 100755
--- a/examples/langchain-chroma/store.py
+++ b/examples/langchain-chroma/store.py
@@ -18,8 +18,8 @@ texts = text_splitter.split_documents(documents)
 # Supplying a persist_directory will store the embeddings on disk
 persist_directory = 'db'
 
-embedding = OpenAIEmbeddings(model="text-embedding-ada-002")
+embedding = OpenAIEmbeddings(model="text-embedding-ada-002", openai_api_base=base_path)
 vectordb = Chroma.from_documents(documents=texts, embedding=embedding, persist_directory=persist_directory)
 vectordb.persist()
 
-vectordb = None
\ No newline at end of file
+vectordb = None
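
Note: for readers running these examples against a local OpenAI-compatible server, a minimal sketch of the pattern this patch applies is shown below. The embed_query call and the sample question are illustrative only and not part of the patch.

    import os
    from langchain.embeddings import OpenAIEmbeddings

    # Point the client at a local OpenAI-compatible endpoint; fall back to
    # http://localhost:8080/v1 when OPENAI_API_BASE is not set, as in the examples.
    base_path = os.environ.get('OPENAI_API_BASE', 'http://localhost:8080/v1')

    # Without openai_api_base the embeddings client would call api.openai.com
    # instead of the local server, which is the gap this patch closes.
    embedding = OpenAIEmbeddings(model="text-embedding-ada-002", openai_api_base=base_path)

    # Illustrative usage: embed a single query string and inspect the vector size.
    vector = embedding.embed_query("What did the president say about taxes?")
    print(len(vector))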