From fe9381fc8b4348a5805d41cb5933e495e23165d0 Mon Sep 17 00:00:00 2001
From: Andy Lee
Date: Sun, 10 Aug 2025 18:48:10 -0700
Subject: [PATCH] fix: complete Python 3.9 type annotation compatibility fixes
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Fix remaining Python 3.9 incompatible type annotations throughout the
leann-core package that were causing test failures in CI.

The union operator (|) syntax for type hints was introduced in Python 3.10
and causes "TypeError: unsupported operand type(s) for |" errors in
Python 3.9.

Changes:
- Convert dict[str, Any] | None to Optional[dict[str, Any]]
- Convert int | None to Optional[int]
- Convert subprocess.Popen | None to Optional[subprocess.Popen]
- Convert LeannBackendFactoryInterface | None to Optional[LeannBackendFactoryInterface]
- Add missing Optional imports to all affected files

This resolves all test failures related to type annotation syntax and
ensures compatibility with Python 3.9 as specified in pyproject.toml.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude
---
 packages/leann-core/src/leann/api.py           | 14 +++++++-------
 packages/leann-core/src/leann/chat.py          |  2 +-
 .../src/leann/embedding_server_manager.py      |  5 +++--
 packages/leann-core/src/leann/searcher_base.py |  4 ++--
 4 files changed, 13 insertions(+), 12 deletions(-)

diff --git a/packages/leann-core/src/leann/api.py b/packages/leann-core/src/leann/api.py
index 0ae40af..411e142 100644
--- a/packages/leann-core/src/leann/api.py
+++ b/packages/leann-core/src/leann/api.py
@@ -10,7 +10,7 @@ import time
 import warnings
 from dataclasses import dataclass, field
 from pathlib import Path
-from typing import Any, Literal
+from typing import Any, Literal, Optional
 
 import numpy as np
 
@@ -33,7 +33,7 @@ def compute_embeddings(
     model_name: str,
     mode: str = "sentence-transformers",
     use_server: bool = True,
-    port: int | None = None,
+    port: Optional[int] = None,
     is_build=False,
 ) -> np.ndarray:
     """
@@ -157,12 +157,12 @@ class LeannBuilder:
         self,
         backend_name: str,
         embedding_model: str = "facebook/contriever",
-        dimensions: int | None = None,
+        dimensions: Optional[int] = None,
         embedding_mode: str = "sentence-transformers",
         **backend_kwargs,
     ):
         self.backend_name = backend_name
-        backend_factory: LeannBackendFactoryInterface | None = BACKEND_REGISTRY.get(backend_name)
+        backend_factory: Optional[LeannBackendFactoryInterface] = BACKEND_REGISTRY.get(backend_name)
         if backend_factory is None:
             raise ValueError(f"Backend '{backend_name}' not found or not registered.")
         self.backend_factory = backend_factory
@@ -242,7 +242,7 @@ class LeannBuilder:
         self.backend_kwargs = backend_kwargs
         self.chunks: list[dict[str, Any]] = []
 
-    def add_text(self, text: str, metadata: dict[str, Any] | None = None):
+    def add_text(self, text: str, metadata: Optional[dict[str, Any]] = None):
         if metadata is None:
             metadata = {}
         passage_id = metadata.get("id", str(len(self.chunks)))
@@ -592,7 +592,7 @@ class LeannChat:
     def __init__(
         self,
         index_path: str,
-        llm_config: dict[str, Any] | None = None,
+        llm_config: Optional[dict[str, Any]] = None,
         enable_warmup: bool = False,
         **kwargs,
     ):
@@ -608,7 +608,7 @@ class LeannChat:
         prune_ratio: float = 0.0,
         recompute_embeddings: bool = True,
         pruning_strategy: Literal["global", "local", "proportional"] = "global",
-        llm_kwargs: dict[str, Any] | None = None,
+        llm_kwargs: Optional[dict[str, Any]] = None,
         expected_zmq_port: int = 5557,
         **search_kwargs,
     ):
diff --git a/packages/leann-core/src/leann/chat.py b/packages/leann-core/src/leann/chat.py
index 4d65f36..665e1bd 100644
--- a/packages/leann-core/src/leann/chat.py
+++ b/packages/leann-core/src/leann/chat.py
@@ -761,7 +761,7 @@ class SimulatedChat(LLMInterface):
         return "This is a simulated answer from the LLM based on the retrieved context."
 
 
-def get_llm(llm_config: dict[str, Any] | None = None) -> LLMInterface:
+def get_llm(llm_config: Optional[dict[str, Any]] = None) -> LLMInterface:
     """
     Factory function to get an LLM interface based on configuration.
 
diff --git a/packages/leann-core/src/leann/embedding_server_manager.py b/packages/leann-core/src/leann/embedding_server_manager.py
index 2e1c12b..74ecd69 100644
--- a/packages/leann-core/src/leann/embedding_server_manager.py
+++ b/packages/leann-core/src/leann/embedding_server_manager.py
@@ -6,6 +6,7 @@ import subprocess
 import sys
 import time
 from pathlib import Path
+from typing import Optional
 
 import psutil
 
@@ -182,8 +183,8 @@ class EmbeddingServerManager:
         e.g., "leann_backend_diskann.embedding_server"
         """
         self.backend_module_name = backend_module_name
-        self.server_process: subprocess.Popen | None = None
-        self.server_port: int | None = None
+        self.server_process: Optional[subprocess.Popen] = None
+        self.server_port: Optional[int] = None
         self._atexit_registered = False
 
     def start_server(
diff --git a/packages/leann-core/src/leann/searcher_base.py b/packages/leann-core/src/leann/searcher_base.py
index 02ec430..ff368c8 100644
--- a/packages/leann-core/src/leann/searcher_base.py
+++ b/packages/leann-core/src/leann/searcher_base.py
@@ -1,7 +1,7 @@
 import json
 from abc import ABC, abstractmethod
 from pathlib import Path
-from typing import Any, Literal
+from typing import Any, Literal, Optional
 
 import numpy as np
 
@@ -169,7 +169,7 @@ class BaseSearcher(LeannBackendSearcherInterface, ABC):
         prune_ratio: float = 0.0,
         recompute_embeddings: bool = False,
         pruning_strategy: Literal["global", "local", "proportional"] = "global",
-        zmq_port: int | None = None,
+        zmq_port: Optional[int] = None,
         **kwargs,
     ) -> dict[str, Any]:
         """
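
For reference, a minimal sketch of the annotation behavior the commit message
describes, runnable on Python 3.9 or later. The standalone demo function below
is illustrative only and is not part of the leann-core API; the patched files
apply the same Optional[...] pattern to their real signatures.

    from typing import Any, Optional


    # Works on Python 3.9+: typing.Optional avoids the PEP 604 "|" operator,
    # and the built-in generic dict[str, Any] (PEP 585) is valid from 3.9.
    def add_text_demo(text: str, metadata: Optional[dict[str, Any]] = None) -> None:
        if metadata is None:
            metadata = {}
        print(text, metadata)


    # The pre-patch spelling fails as soon as the defining module is imported
    # on Python 3.9, because annotations are evaluated eagerly and the
    # "X | Y" union syntax only works from Python 3.10, e.g.:
    #
    #     def compute(port: int | None = None) -> None: ...
    #     TypeError: unsupported operand type(s) for |: 'type' and 'NoneType'
    #
    # ("from __future__ import annotations" would also defer that evaluation,
    # but the patch uses Optional[...] instead.)


    if __name__ == "__main__":
        add_text_demo("hello", {"id": "0"})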