From a0e53ef8f19b661d33c8cd8e4a39fbf58173fcbc Mon Sep 17 00:00:00 2001
From: Andy Lee
Date: Wed, 24 Dec 2025 00:23:25 +0000
Subject: [PATCH] Format code with ruff

---
 apps/history_data/wechat_history.py       |  4 +-
 .../vidore_v2_benchmark.py                |  3 +-
 tests/test_prompt_template_persistence.py | 65 +++++++++++++++++--
 tests/test_readme_examples.py             |  4 +-
 4 files changed, 68 insertions(+), 8 deletions(-)

diff --git a/apps/history_data/wechat_history.py b/apps/history_data/wechat_history.py
index f65f77c..9c99f77 100644
--- a/apps/history_data/wechat_history.py
+++ b/apps/history_data/wechat_history.py
@@ -314,7 +314,9 @@ class WeChatHistoryReader(BaseReader):
 
         return concatenated_groups
 
-    def _create_concatenated_content(self, message_group: dict, contact_name: str) -> tuple[str, str]:
+    def _create_concatenated_content(
+        self, message_group: dict, contact_name: str
+    ) -> tuple[str, str]:
         """
         Create concatenated content from a group of messages.
 
diff --git a/apps/multimodal/vision-based-pdf-multi-vector/vidore_v2_benchmark.py b/apps/multimodal/vision-based-pdf-multi-vector/vidore_v2_benchmark.py
index d6130d8..be4eb4f 100644
--- a/apps/multimodal/vision-based-pdf-multi-vector/vidore_v2_benchmark.py
+++ b/apps/multimodal/vision-based-pdf-multi-vector/vidore_v2_benchmark.py
@@ -113,7 +113,8 @@ def load_vidore_v2_data(
         # Try to get a sample to see actual language values
         try:
             sample_ds = cast(
-                Dataset, load_dataset(dataset_path, "queries", split=split, revision=revision)
+                Dataset,
+                load_dataset(dataset_path, "queries", split=split, revision=revision),
             )
             if len(sample_ds) > 0 and "language" in sample_ds.column_names:
                 sample_langs = set(sample_ds["language"])
diff --git a/tests/test_prompt_template_persistence.py b/tests/test_prompt_template_persistence.py
index 55e24e3..56391ff 100644
--- a/tests/test_prompt_template_persistence.py
+++ b/tests/test_prompt_template_persistence.py
@@ -581,7 +581,18 @@ class TestQueryTemplateApplicationInComputeEmbedding:
 
         # Create a concrete implementation for testing
         class TestSearcher(BaseSearcher):
-            def search(self, query, top_k, complexity=64, beam_width=1, prune_ratio=0.0, recompute_embeddings=False, pruning_strategy="global", zmq_port=None, **kwargs):
+            def search(
+                self,
+                query,
+                top_k,
+                complexity=64,
+                beam_width=1,
+                prune_ratio=0.0,
+                recompute_embeddings=False,
+                pruning_strategy="global",
+                zmq_port=None,
+                **kwargs,
+            ):
                 return {"labels": [], "distances": []}
 
         searcher = object.__new__(TestSearcher)
@@ -625,7 +636,18 @@ class TestQueryTemplateApplicationInComputeEmbedding:
 
         # Create a concrete implementation for testing
         class TestSearcher(BaseSearcher):
-            def search(self, query, top_k, complexity=64, beam_width=1, prune_ratio=0.0, recompute_embeddings=False, pruning_strategy="global", zmq_port=None, **kwargs):
+            def search(
+                self,
+                query,
+                top_k,
+                complexity=64,
+                beam_width=1,
+                prune_ratio=0.0,
+                recompute_embeddings=False,
+                pruning_strategy="global",
+                zmq_port=None,
+                **kwargs,
+            ):
                 return {"labels": [], "distances": []}
 
         searcher = object.__new__(TestSearcher)
@@ -671,7 +693,18 @@ class TestQueryTemplateApplicationInComputeEmbedding:
         from leann.searcher_base import BaseSearcher
 
         class TestSearcher(BaseSearcher):
-            def search(self, query, top_k, complexity=64, beam_width=1, prune_ratio=0.0, recompute_embeddings=False, pruning_strategy="global", zmq_port=None, **kwargs):
+            def search(
+                self,
+                query,
+                top_k,
+                complexity=64,
+                beam_width=1,
+                prune_ratio=0.0,
+                recompute_embeddings=False,
+                pruning_strategy="global",
+                zmq_port=None,
+                **kwargs,
+            ):
                 return {"labels": [], "distances": []}
 
         searcher = object.__new__(TestSearcher)
@@ -710,7 +743,18 @@ class TestQueryTemplateApplicationInComputeEmbedding:
         from leann.searcher_base import BaseSearcher
 
        class TestSearcher(BaseSearcher):
-            def search(self, query, top_k, complexity=64, beam_width=1, prune_ratio=0.0, recompute_embeddings=False, pruning_strategy="global", zmq_port=None, **kwargs):
+            def search(
+                self,
+                query,
+                top_k,
+                complexity=64,
+                beam_width=1,
+                prune_ratio=0.0,
+                recompute_embeddings=False,
+                pruning_strategy="global",
+                zmq_port=None,
+                **kwargs,
+            ):
                 return {"labels": [], "distances": []}
 
         searcher = object.__new__(TestSearcher)
@@ -774,7 +818,18 @@ class TestQueryTemplateApplicationInComputeEmbedding:
         from leann.searcher_base import BaseSearcher
 
         class TestSearcher(BaseSearcher):
-            def search(self, query, top_k, complexity=64, beam_width=1, prune_ratio=0.0, recompute_embeddings=False, pruning_strategy="global", zmq_port=None, **kwargs):
+            def search(
+                self,
+                query,
+                top_k,
+                complexity=64,
+                beam_width=1,
+                prune_ratio=0.0,
+                recompute_embeddings=False,
+                pruning_strategy="global",
+                zmq_port=None,
+                **kwargs,
+            ):
                 return {"labels": [], "distances": []}
 
         searcher = object.__new__(TestSearcher)
diff --git a/tests/test_readme_examples.py b/tests/test_readme_examples.py
index 371d13c..3ff0829 100644
--- a/tests/test_readme_examples.py
+++ b/tests/test_readme_examples.py
@@ -98,7 +98,9 @@ def test_backend_options():
     with tempfile.TemporaryDirectory() as temp_dir:
         # Use smaller model in CI to avoid memory issues
         is_ci = os.environ.get("CI") == "true"
-        embedding_model = "sentence-transformers/all-MiniLM-L6-v2" if is_ci else "facebook/contriever"
+        embedding_model = (
+            "sentence-transformers/all-MiniLM-L6-v2" if is_ci else "facebook/contriever"
+        )
         dimensions = 384 if is_ci else None
 
         # Test HNSW backend (as shown in README)