fix: complete Python 3.9 type annotation compatibility fixes

Fix the remaining Python 3.9-incompatible type annotations throughout the
leann-core package that were causing test failures in CI. The union operator
(|) syntax for type hints (PEP 604) was introduced in Python 3.10; on Python
3.9 it raises "TypeError: unsupported operand type(s) for |" as soon as the
annotation is evaluated, which happens at import time for def and class
annotations unless `from __future__ import annotations` is in effect.

Changes:
- Convert `dict[str, Any] | None` to `Optional[dict[str, Any]]`
- Convert `int | None` to `Optional[int]`
- Convert `subprocess.Popen | None` to `Optional[subprocess.Popen]`
- Convert `LeannBackendFactoryInterface | None` to `Optional[LeannBackendFactoryInterface]`
- Add missing Optional imports to all affected files

This resolves all test failures related to type annotation syntax and ensures
compatibility with Python 3.9 as specified in pyproject.toml.
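
A cheap regression guard (a sketch, not a test from this repo; the module
names are placeholders) is an import smoke test run under Python 3.9, since
any leftover PEP 604 union in a def or class annotation raises as soon as the
module loads:

    # Hypothetical pytest smoke test; substitute the real leann-core modules.
    import importlib
    import pytest

    @pytest.mark.parametrize("mod", ["leann_core.api", "leann_core.searcher"])
    def test_module_imports_on_py39(mod):
        importlib.import_module(mod)  # TypeError here if an `X | None` remains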

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
commit fe9381fc8b
parent 037aad0870
Author: Andy Lee
Date: 2025-08-10 18:48:10 -07:00

4 changed files with 13 additions and 12 deletions

File 1 of 4:

@@ -10,7 +10,7 @@ import time
 import warnings
 from dataclasses import dataclass, field
 from pathlib import Path
-from typing import Any, Literal
+from typing import Any, Literal, Optional

 import numpy as np
@@ -33,7 +33,7 @@ def compute_embeddings(
     model_name: str,
     mode: str = "sentence-transformers",
     use_server: bool = True,
-    port: int | None = None,
+    port: Optional[int] = None,
     is_build=False,
 ) -> np.ndarray:
     """
@@ -157,12 +157,12 @@ class LeannBuilder:
         self,
         backend_name: str,
         embedding_model: str = "facebook/contriever",
-        dimensions: int | None = None,
+        dimensions: Optional[int] = None,
         embedding_mode: str = "sentence-transformers",
         **backend_kwargs,
     ):
         self.backend_name = backend_name
-        backend_factory: LeannBackendFactoryInterface | None = BACKEND_REGISTRY.get(backend_name)
+        backend_factory: Optional[LeannBackendFactoryInterface] = BACKEND_REGISTRY.get(backend_name)
         if backend_factory is None:
             raise ValueError(f"Backend '{backend_name}' not found or not registered.")
         self.backend_factory = backend_factory
@@ -242,7 +242,7 @@ class LeannBuilder:
         self.backend_kwargs = backend_kwargs
         self.chunks: list[dict[str, Any]] = []

-    def add_text(self, text: str, metadata: dict[str, Any] | None = None):
+    def add_text(self, text: str, metadata: Optional[dict[str, Any]] = None):
         if metadata is None:
             metadata = {}
         passage_id = metadata.get("id", str(len(self.chunks)))
@@ -592,7 +592,7 @@ class LeannChat:
     def __init__(
         self,
         index_path: str,
-        llm_config: dict[str, Any] | None = None,
+        llm_config: Optional[dict[str, Any]] = None,
         enable_warmup: bool = False,
         **kwargs,
     ):
@@ -608,7 +608,7 @@ class LeannChat:
         prune_ratio: float = 0.0,
         recompute_embeddings: bool = True,
         pruning_strategy: Literal["global", "local", "proportional"] = "global",
-        llm_kwargs: dict[str, Any] | None = None,
+        llm_kwargs: Optional[dict[str, Any]] = None,
         expected_zmq_port: int = 5557,
         **search_kwargs,
     ):

File 2 of 4:

@@ -761,7 +761,7 @@ class SimulatedChat(LLMInterface):
         return "This is a simulated answer from the LLM based on the retrieved context."


-def get_llm(llm_config: dict[str, Any] | None = None) -> LLMInterface:
+def get_llm(llm_config: Optional[dict[str, Any]] = None) -> LLMInterface:
     """
     Factory function to get an LLM interface based on configuration.

File 3 of 4:

@@ -6,6 +6,7 @@ import subprocess
 import sys
 import time
 from pathlib import Path
+from typing import Optional

 import psutil
@@ -182,8 +183,8 @@ class EmbeddingServerManager:
             e.g., "leann_backend_diskann.embedding_server"
         """
         self.backend_module_name = backend_module_name
-        self.server_process: subprocess.Popen | None = None
-        self.server_port: int | None = None
+        self.server_process: Optional[subprocess.Popen] = None
+        self.server_port: Optional[int] = None
         self._atexit_registered = False

     def start_server(

File 4 of 4:

@@ -1,7 +1,7 @@
 import json
 from abc import ABC, abstractmethod
 from pathlib import Path
-from typing import Any, Literal
+from typing import Any, Literal, Optional

 import numpy as np
@@ -169,7 +169,7 @@ class BaseSearcher(LeannBackendSearcherInterface, ABC):
         prune_ratio: float = 0.0,
         recompute_embeddings: bool = False,
         pruning_strategy: Literal["global", "local", "proportional"] = "global",
-        zmq_port: int | None = None,
+        zmq_port: Optional[int] = None,
         **kwargs,
     ) -> dict[str, Any]:
         """