Fix all test type errors and enable ty check on tests

- Fix test_basic.py: search() takes str not list
- Fix test_cli_prompt_template.py: add type: ignore for Mock assignments
- Fix test_prompt_template_persistence.py: match BaseSearcher.search signature
- Fix test_prompt_template_e2e.py: add type narrowing asserts after skip
- Fix test_readme_examples.py: use explicit kwargs instead of **model_args
- Fix metadata_filter.py: allow Optional[MetadataFilters]
- Update CI to run ty check on tests
This commit is contained in:
Andy Lee
2025-12-23 09:42:08 +00:00
parent 931051e33b
commit b754474c44
7 changed files with 30 additions and 27 deletions

View File

@@ -47,8 +47,8 @@ jobs:
       - name: Run ty type checker
         run: |
-          # Run ty on core packages and apps, excluding tests
-          ty check --exclude "tests/**" packages/leann-core/src apps
+          # Run ty on core packages, apps, and tests
+          ty check packages/leann-core/src apps tests
   build:
     needs: [lint, type-check]

View File

@@ -7,7 +7,7 @@ operators for different data types including numbers, strings, booleans, and lists
 """
 import logging
-from typing import Any, Union
+from typing import Any, Optional, Union
 logger = logging.getLogger(__name__)
@@ -47,7 +47,7 @@ class MetadataFilterEngine:
     }
     def apply_filters(
-        self, search_results: list[dict[str, Any]], metadata_filters: MetadataFilters
+        self, search_results: list[dict[str, Any]], metadata_filters: Optional[MetadataFilters]
     ) -> list[dict[str, Any]]:
         """
         Apply metadata filters to a list of search results.

View File

@@ -91,7 +91,7 @@ def test_large_index():
     builder.build_index(index_path)
     searcher = LeannSearcher(index_path)
-    results = searcher.search(["word10 word20"], top_k=10)
-    assert len(results[0]) == 10
+    results = searcher.search("word10 word20", top_k=10)
+    assert len(results) == 10
     # Cleanup
     searcher.cleanup()

View File

@@ -123,7 +123,7 @@ class TestPromptTemplateStoredInEmbeddingOptions:
         cli = LeannCLI()
         # Mock load_documents to return a document so builder is created
-        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])
+        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])  # type: ignore[assignment]
         parser = cli.create_parser()
@@ -175,7 +175,7 @@ class TestPromptTemplateStoredInEmbeddingOptions:
         cli = LeannCLI()
         # Mock load_documents to return a document so builder is created
-        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])
+        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])  # type: ignore[assignment]
         parser = cli.create_parser()
@@ -230,7 +230,7 @@ class TestPromptTemplateStoredInEmbeddingOptions:
         cli = LeannCLI()
         # Mock load_documents to return a document so builder is created
-        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])
+        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])  # type: ignore[assignment]
         parser = cli.create_parser()
@@ -307,7 +307,7 @@ class TestPromptTemplateStoredInEmbeddingOptions:
         cli = LeannCLI()
         # Mock load_documents to return a document so builder is created
-        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])
+        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])  # type: ignore[assignment]
         parser = cli.create_parser()
@@ -376,7 +376,7 @@ class TestPromptTemplateStoredInEmbeddingOptions:
         cli = LeannCLI()
         # Mock load_documents to return a document so builder is created
-        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])
+        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])  # type: ignore[assignment]
         parser = cli.create_parser()
@@ -432,7 +432,7 @@ class TestPromptTemplateFlowsToComputeEmbeddings:
         cli = LeannCLI()
         # Mock load_documents to return a simple document
-        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])
+        cli.load_documents = Mock(return_value=[{"text": "test content", "metadata": {}}])  # type: ignore[assignment]
         parser = cli.create_parser()

View File

@@ -67,7 +67,7 @@ def check_lmstudio_available() -> bool:
     return False
-def get_lmstudio_first_model() -> str:
+def get_lmstudio_first_model() -> str | None:
     """Get the first available model from LM Studio."""
     try:
         response = requests.get("http://localhost:1234/v1/models", timeout=5.0)
@@ -91,6 +91,7 @@ class TestPromptTemplateOpenAI:
         model_name = get_lmstudio_first_model()
         if not model_name:
             pytest.skip("No models loaded in LM Studio")
+        assert model_name is not None  # Type narrowing for type checker
         texts = ["artificial intelligence", "machine learning"]
         prompt_template = "search_query: "
@@ -120,6 +121,7 @@ class TestPromptTemplateOpenAI:
         model_name = get_lmstudio_first_model()
         if not model_name:
             pytest.skip("No models loaded in LM Studio")
+        assert model_name is not None  # Type narrowing for type checker
         text = "machine learning"
         base_url = "http://localhost:1234/v1"
@@ -271,6 +273,7 @@ class TestLMStudioSDK:
         model_name = get_lmstudio_first_model()
         if not model_name:
             pytest.skip("No models loaded in LM Studio")
+        assert model_name is not None  # Type narrowing for type checker
         try:
             from leann.embedding_compute import _query_lmstudio_context_limit

View File

@@ -581,7 +581,7 @@ class TestQueryTemplateApplicationInComputeEmbedding:
         # Create a concrete implementation for testing
         class TestSearcher(BaseSearcher):
-            def search(self, query_vectors, top_k, complexity, beam_width=1, **kwargs):
+            def search(self, query, top_k, complexity=64, beam_width=1, prune_ratio=0.0, recompute_embeddings=False, pruning_strategy="global", zmq_port=None, **kwargs):
                 return {"labels": [], "distances": []}
         searcher = object.__new__(TestSearcher)
@@ -625,7 +625,7 @@ class TestQueryTemplateApplicationInComputeEmbedding:
         # Create a concrete implementation for testing
         class TestSearcher(BaseSearcher):
-            def search(self, query_vectors, top_k, complexity, beam_width=1, **kwargs):
+            def search(self, query, top_k, complexity=64, beam_width=1, prune_ratio=0.0, recompute_embeddings=False, pruning_strategy="global", zmq_port=None, **kwargs):
                 return {"labels": [], "distances": []}
         searcher = object.__new__(TestSearcher)
@@ -671,7 +671,7 @@ class TestQueryTemplateApplicationInComputeEmbedding:
         from leann.searcher_base import BaseSearcher
         class TestSearcher(BaseSearcher):
-            def search(self, query_vectors, top_k, complexity, beam_width=1, **kwargs):
+            def search(self, query, top_k, complexity=64, beam_width=1, prune_ratio=0.0, recompute_embeddings=False, pruning_strategy="global", zmq_port=None, **kwargs):
                 return {"labels": [], "distances": []}
         searcher = object.__new__(TestSearcher)
@@ -710,7 +710,7 @@ class TestQueryTemplateApplicationInComputeEmbedding:
         from leann.searcher_base import BaseSearcher
         class TestSearcher(BaseSearcher):
-            def search(self, query_vectors, top_k, complexity, beam_width=1, **kwargs):
+            def search(self, query, top_k, complexity=64, beam_width=1, prune_ratio=0.0, recompute_embeddings=False, pruning_strategy="global", zmq_port=None, **kwargs):
                 return {"labels": [], "distances": []}
         searcher = object.__new__(TestSearcher)
@@ -774,7 +774,7 @@ class TestQueryTemplateApplicationInComputeEmbedding:
         from leann.searcher_base import BaseSearcher
         class TestSearcher(BaseSearcher):
-            def search(self, query_vectors, top_k, complexity, beam_width=1, **kwargs):
+            def search(self, query, top_k, complexity=64, beam_width=1, prune_ratio=0.0, recompute_embeddings=False, pruning_strategy="global", zmq_port=None, **kwargs):
                 return {"labels": [], "distances": []}
         searcher = object.__new__(TestSearcher)

View File

@@ -97,17 +97,15 @@ def test_backend_options():
     with tempfile.TemporaryDirectory() as temp_dir:
         # Use smaller model in CI to avoid memory issues
-        if os.environ.get("CI") == "true":
-            model_args = {
-                "embedding_model": "sentence-transformers/all-MiniLM-L6-v2",
-                "dimensions": 384,
-            }
-        else:
-            model_args = {}
+        is_ci = os.environ.get("CI") == "true"
+        embedding_model = "sentence-transformers/all-MiniLM-L6-v2" if is_ci else "facebook/contriever"
+        dimensions = 384 if is_ci else None
         # Test HNSW backend (as shown in README)
         hnsw_path = str(Path(temp_dir) / "test_hnsw.leann")
-        builder_hnsw = LeannBuilder(backend_name="hnsw", **model_args)
+        builder_hnsw = LeannBuilder(
+            backend_name="hnsw", embedding_model=embedding_model, dimensions=dimensions
+        )
         builder_hnsw.add_text("Test document for HNSW backend")
         builder_hnsw.build_index(hnsw_path)
         assert Path(hnsw_path).parent.exists()
@@ -115,7 +113,9 @@ def test_backend_options():
         # Test DiskANN backend (mentioned as available option)
         diskann_path = str(Path(temp_dir) / "test_diskann.leann")
-        builder_diskann = LeannBuilder(backend_name="diskann", **model_args)
+        builder_diskann = LeannBuilder(
+            backend_name="diskann", embedding_model=embedding_model, dimensions=dimensions
+        )
         builder_diskann.add_text("Test document for DiskANN backend")
         builder_diskann.build_index(diskann_path)
         assert Path(diskann_path).parent.exists()