add logs
@@ -99,7 +99,9 @@ if __name__ == "__main__":
     print("- 'What are the main techniques LEANN uses?'")
     print("- 'What is the technique DLPM?'")
     print("- 'Who does Elizabeth Bennet marry?'")
-    print("- 'What is the problem of developing pan gu model? (盘古大模型开发中遇到什么问题?)'")
+    print(
+        "- 'What is the problem of developing pan gu model Huawei meets? (盘古大模型开发中遇到什么问题?)'"
+    )
     print("\nOr run without --query for interactive mode\n")
 
     rag = DocumentRAG()
Submodule packages/leann-backend-diskann/third_party/DiskANN updated: 67a2611ad1...af2a26481e
@@ -636,7 +636,10 @@ class LeannChat:
             "Please provide the best answer you can based on this context and your knowledge."
         )
 
+        ask_time = time.time()
         ans = self.llm.ask(prompt, **llm_kwargs)
+        ask_time = time.time() - ask_time
+        logger.info(f" Ask time: {ask_time} seconds")
        return ans
 
     def start_interactive(self):
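
The change above times the LLM call with a simple start/stop pair around self.llm.ask. The same pattern can be factored into a small reusable helper; the snippet below is only a minimal, self-contained sketch of that idea (the log_duration name and the use of time.perf_counter are illustrative assumptions, not part of this commit):

import logging
import time
from contextlib import contextmanager

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

@contextmanager
def log_duration(label: str):
    # Hypothetical helper (not in the commit): measure wall-clock time
    # around a block and log it, mirroring the ask-time logging above.
    start = time.perf_counter()
    try:
        yield
    finally:
        elapsed = time.perf_counter() - start
        logger.info(f"{label}: {elapsed:.3f} seconds")

# Usage sketch; time.sleep stands in for self.llm.ask(prompt, **llm_kwargs).
with log_duration("Ask time"):
    time.sleep(0.1)
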
@@ -358,7 +358,11 @@ def validate_model_and_suggest(model_name: str, llm_type: str) -> str | None:
         error_msg += f"\n\nModel '{model_name}' was not found in Ollama's library."
 
         if suggestions:
-            error_msg += "\n\nDid you mean one of these installed models?\n"
+            error_msg += (
+                "\n\nDid you mean one of these installed models?\n"
+                + "\nTry to use ollama pull to install the model you need\n"
+            )
             for i, suggestion in enumerate(suggestions, 1):
                 error_msg += f" {i}. {suggestion}\n"
         else:
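
For context, the suggestions list driving the message above could come from fuzzy matching the requested name against locally installed model tags. This is only a sketch of that idea (difflib.get_close_matches and the sample tags are assumptions, not necessarily how validate_model_and_suggest actually works):

import difflib

def suggest_models(model_name: str, installed: list[str], limit: int = 3) -> list[str]:
    # Assumed approach: return the installed tags most similar to the
    # requested name; cutoff=0.4 still catches typos like a dropped letter.
    return difflib.get_close_matches(model_name, installed, n=limit, cutoff=0.4)

installed_models = ["llama3.2:3b", "llama3.1:8b", "qwen2.5:7b"]  # sample data
print(suggest_models("lama3.2", installed_models))  # closest tags, best match first
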