chore: align core deps with transformers pin

Author: Andy Lee
Date:   2025-10-05 19:01:58 -07:00
parent 761ec1f0ac
commit 1484406a8d
2 changed files with 6 additions and 6 deletions

pyproject.toml

@@ -18,14 +18,14 @@ dependencies = [
"pyzmq>=23.0.0",
"msgpack>=1.0.0",
"torch>=2.0.0",
"sentence-transformers>=2.2.0",
"sentence-transformers>=2.2.0,<3.0",
"llama-index-core>=0.12.0",
"llama-index-readers-file>=0.4.0", # Essential for document reading
"llama-index-embeddings-huggingface>=0.5.5", # For embeddings
"python-dotenv>=1.0.0",
"openai>=1.0.0",
"huggingface-hub>=0.20.0",
"transformers>=4.30.0",
"transformers>=4.30.0,<4.43",
"requests>=2.25.0",
"accelerate>=0.20.0",
"PyPDF2>=3.0.0",
@@ -40,7 +40,7 @@ dependencies = [
 [project.optional-dependencies]
 colab = [
     "torch>=2.0.0,<3.0.0", # Limit torch version to avoid conflicts
-    "transformers>=4.30.0,<5.0.0", # Limit transformers version
+    "transformers>=4.30.0,<4.43", # Limit transformers version for Python 3.9 compatibility
     "accelerate>=0.20.0,<1.0.0", # Limit accelerate version
 ]
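
Not part of the commit itself, but as a quick sanity check: a minimal sketch, assuming the two repinned packages and the packaging library (bundled with pip) are installed, that verifies a local environment against the tightened bounds. The PINS mapping is hypothetical and simply mirrors the specifiers from the diff above.

# Sketch: check installed versions against the new pins from pyproject.toml.
from importlib.metadata import version
from packaging.specifiers import SpecifierSet

PINS = {
    "sentence-transformers": ">=2.2.0,<3.0",   # core pin above
    "transformers": ">=4.30.0,<4.43",          # core and colab pin above
}

for name, spec in PINS.items():
    installed = version(name)              # raises PackageNotFoundError if absent
    ok = installed in SpecifierSet(spec)   # True when the pin is satisfied
    print(f"{name} {installed} satisfies '{spec}': {ok}")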