Add inference time to output

ivan-ontruck 2023-06-11 21:16:17 +02:00
parent d1de57291e
commit 52eb020256
1 changed file with 4 additions and 1 deletion


@@ -7,6 +7,7 @@ from langchain.vectorstores import Chroma
 from langchain.llms import GPT4All, LlamaCpp
 import os
 import argparse
+import time
 
 load_dotenv()
@@ -47,13 +48,15 @@ def main():
             continue
 
         # Get the answer from the chain
+        start = time.time()
         res = qa(query)
         answer, docs = res['result'], [] if args.hide_source else res['source_documents']
+        end = time.time()
 
         # Print the result
         print("\n\n> Question:")
         print(query)
-        print("\n> Answer:")
+        print(f"\n> Answer (took {round(end - start, 2)} s.):")
         print(answer)
 
         # Print the relevant sources used for the answer
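For reference, the change is plain wall-clock timing around the chain call: record time.time() before and after the query, then print the rounded difference next to the answer. A minimal standalone sketch of the same pattern, where answer_query is a hypothetical stand-in for the qa chain invoked in privateGPT:

import time

def answer_query(query: str) -> str:
    # Hypothetical stand-in for `res = qa(query)`; simulate some inference latency.
    time.sleep(0.5)
    return "a generated answer"

start = time.time()
answer = answer_query("What does the document say?")
end = time.time()

# Same formatting as the patched line: elapsed seconds rounded to two decimals.
print(f"\n> Answer (took {round(end - start, 2)} s.):")
print(answer)

time.time() measures wall-clock time, which is what matters for end-to-end inference latency as seen by the user; time.perf_counter() would give higher resolution, but with the output rounded to two decimals the difference is negligible here.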