Compare commits

1 Commit

fix/chunki...fix/chunki

| Author | SHA1 | Date |
|---|---|---|
|  | 64b92a04a7 |  |
```diff
@@ -96,6 +96,7 @@ def get_model_token_limit(model_name: str) -> int:
     logger.warning(f"Unknown model '{model_name}', using default 512 token limit")
     return 512
 
+
 # Set up logger with proper level
 logger = logging.getLogger(__name__)
 LOG_LEVEL = os.getenv("LEANN_LOG_LEVEL", "WARNING").upper()
@@ -866,7 +867,9 @@ def compute_embeddings_ollama(
         if retry_count >= max_retries:
             # Enhanced error detection for token limit violations
             error_msg = str(e).lower()
-            if "token" in error_msg and ("limit" in error_msg or "exceed" in error_msg or "length" in error_msg):
+            if "token" in error_msg and (
+                "limit" in error_msg or "exceed" in error_msg or "length" in error_msg
+            ):
                 logger.error(
                     f"Token limit exceeded for batch. Error: {e}. "
                     f"Consider reducing chunk sizes or check token truncation."
```
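The second hunk only re-wraps the token-limit check inside the retry handler of `compute_embeddings_ollama`; no behavior changes. For context, here is a minimal, self-contained sketch of how such a check can sit inside a retry loop. Only the lowercased substring test and the `logger.error` message are taken from the diff; `embed_fn`, `max_retries`, and the exponential backoff are illustrative assumptions, not the project's actual signature.

```python
import logging
import time

logger = logging.getLogger(__name__)


def embed_with_retries(texts, embed_fn, max_retries: int = 3):
    """Call embed_fn(texts), retrying on failure and flagging token-limit errors.

    embed_fn and max_retries are stand-ins; the real compute_embeddings_ollama
    signature is not shown in this diff.
    """
    retry_count = 0
    while True:
        try:
            return embed_fn(texts)
        except Exception as e:
            retry_count += 1
            if retry_count >= max_retries:
                # Same substring heuristic as the diff: classify the failure
                # as a token-limit violation from the error text alone.
                error_msg = str(e).lower()
                if "token" in error_msg and (
                    "limit" in error_msg or "exceed" in error_msg or "length" in error_msg
                ):
                    logger.error(
                        f"Token limit exceeded for batch. Error: {e}. "
                        f"Consider reducing chunk sizes or check token truncation."
                    )
                raise
            time.sleep(2**retry_count)  # assumed backoff; not part of the diff
```

Inspecting the lowercased message keeps the classification independent of whichever exception type the Ollama client happens to raise.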
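The first hunk sits at the tail of `get_model_token_limit`, which falls back to a default of 512 tokens and logs a warning when the model is unknown. A short sketch of that lookup-with-fallback pattern follows; the lookup table is purely hypothetical, since only the signature, the warning text, and the 512 default are visible in the diff.

```python
import logging

logger = logging.getLogger(__name__)

# Hypothetical table: the real mapping used by get_model_token_limit is not
# visible in this diff, so these entries are placeholders only.
MODEL_TOKEN_LIMITS = {
    "example-embed-small": 512,
    "example-embed-large": 2048,
}


def get_model_token_limit(model_name: str) -> int:
    """Return the token limit for model_name, defaulting to 512 when unknown."""
    limit = MODEL_TOKEN_LIMITS.get(model_name)
    if limit is None:
        # Warning text matches the context line shown in the diff.
        logger.warning(f"Unknown model '{model_name}', using default 512 token limit")
        return 512
    return limit
```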