Add messages regarding the use of tokens during query (#147)

* Add messages regarding the use of tokens during query

* fix: apply ruff format
This commit is contained in:
CelineNi2
2025-10-16 01:48:48 +02:00
committed by GitHub
parent 6495833887
commit 28085f6f04

View File

@@ -834,6 +834,11 @@ class OpenAIChat(LLMInterface):
try:
response = self.client.chat.completions.create(**params)
print(
f"Total tokens = {response.usage.total_tokens}, prompt tokens = {response.usage.prompt_tokens}, completion tokens = {response.usage.completion_tokens}"
)
if response.choices[0].finish_reason == "length":
print("The query is exceeding the maximum allowed number of tokens")
return response.choices[0].message.content.strip()
except Exception as e:
logger.error(f"Error communicating with OpenAI: {e}")