From d540adedcca68202df7369122aba4b9d770323d1 Mon Sep 17 00:00:00 2001
From: Daniel McDonald
Date: Fri, 16 Jun 2023 09:18:49 -0400
Subject: [PATCH] raise exception

---
 privateGPT.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/privateGPT.py b/privateGPT.py
index a47a745..0e3b9d0 100755
--- a/privateGPT.py
+++ b/privateGPT.py
@@ -37,8 +37,9 @@ def main():
         case "GPT4All":
             llm = GPT4All(model=model_path, n_ctx=model_n_ctx, backend='gptj', n_batch=model_n_batch, callbacks=callbacks, verbose=False)
         case _default:
-            print(f"Model {model_type} not supported!")
-            exit;
+            # raise exception if model_type is not supported
+            raise Exception(f"Model type {model_type} is not supported. Please choose one of the following: LlamaCpp, GPT4All")
+
     qa = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=retriever, return_source_documents= not args.hide_source)
     # Interactive questions and answers
     while True:
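
The change matters because the removed `exit;` is a bare expression statement, not a call: it evaluates the `exit` builtin and falls through, so execution reaches `RetrievalQA.from_chain_type` with `llm` never assigned and fails at an unrelated point. Raising an exception stops execution right at the unsupported `model_type` with a clear message. Below is a minimal, self-contained sketch of that dispatch-and-raise pattern only; the `build_llm` helper, the placeholder classes, and the model path are hypothetical stand-ins for the real LangChain wrappers and privateGPT wiring, not code from the repository.

# Minimal sketch of the dispatch-and-raise pattern from the patch.
# LlamaCpp / GPT4All here are placeholder classes, not the LangChain wrappers.

class LlamaCpp:
    def __init__(self, model_path: str):
        self.model_path = model_path

class GPT4All:
    def __init__(self, model_path: str):
        self.model_path = model_path

def build_llm(model_type: str, model_path: str):
    """Return an LLM wrapper for the given type, or raise on unknown types."""
    match model_type:
        case "LlamaCpp":
            return LlamaCpp(model_path)
        case "GPT4All":
            return GPT4All(model_path)
        case _:
            # Raising (instead of print + bare `exit;`) halts execution here,
            # so nothing downstream ever runs with an undefined llm.
            raise Exception(
                f"Model type {model_type} is not supported. "
                "Please choose one of the following: LlamaCpp, GPT4All"
            )

if __name__ == "__main__":
    llm = build_llm("GPT4All", "models/example-model.bin")  # placeholder path
    print(type(llm).__name__)  # -> GPT4All

    try:
        build_llm("OpenAI", "models/example-model.bin")
    except Exception as err:
        print(err)  # -> Model type OpenAI is not supported. ...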