diff --git a/ingest.py b/ingest.py
index 2a9a161..79434b3 100644
--- a/ingest.py
+++ b/ingest.py
@@ -1,4 +1,5 @@
 import os
+from dotenv import load_dotenv
 from langchain.document_loaders import TextLoader, PDFMinerLoader, CSVLoader
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain.vectorstores import Chroma
@@ -6,6 +7,8 @@ from langchain.embeddings import LlamaCppEmbeddings
 from constants import PERSIST_DIRECTORY
 from constants import CHROMA_SETTINGS
 
+load_dotenv()
+
 def main():
     llama_embeddings_model = os.environ.get('LLAMA_EMBEDDINGS_MODEL')
     persist_directory = os.environ.get('PERSIST_DIRECTORY')
@@ -30,4 +33,4 @@ def main():
     db = None
 
 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
diff --git a/privateGPT.py b/privateGPT.py
index 38fab48..4c603a2 100644
--- a/privateGPT.py
+++ b/privateGPT.py
@@ -1,3 +1,4 @@
+from dotenv import load_dotenv
 from langchain.chains import RetrievalQA
 from langchain.embeddings import LlamaCppEmbeddings
 from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
@@ -5,6 +6,8 @@ from langchain.vectorstores import Chroma
 from langchain.llms import GPT4All, LlamaCpp
 import os
 
+load_dotenv()
+
 llama_embeddings_model = os.environ.get("LLAMA_EMBEDDINGS_MODEL")
 persist_directory = os.environ.get('PERSIST_DIRECTORY')
 
@@ -51,4 +54,4 @@ def main():
     print(document.page_content)
 
 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()