fix: clean up build system and restore Python 3.9 compatibility

Build system improvements:
- Simplify macOS environment detection using brew --prefix (see the sketch below)
- Remove complex hardcoded paths and CMAKE_ARGS
- Let CMake automatically find Homebrew packages via CMAKE_PREFIX_PATH
- Clean separation between Intel (/usr/local) and Apple Silicon (/opt/homebrew)
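
A minimal sketch of the detection flow described above, written in Python for illustration; the homebrew_prefix helper and its fallback logic are assumptions, not the project's actual build code:

```python
import os
import platform
import subprocess


def homebrew_prefix() -> str:
    """Ask Homebrew for its prefix; fall back to the conventional per-arch path."""
    try:
        result = subprocess.run(
            ["brew", "--prefix"], capture_output=True, text=True, check=True
        )
        return result.stdout.strip()
    except (OSError, subprocess.CalledProcessError):
        # /opt/homebrew on Apple Silicon, /usr/local on Intel
        return "/opt/homebrew" if platform.machine() == "arm64" else "/usr/local"


# Exporting CMAKE_PREFIX_PATH lets CMake's find_package() discover
# Homebrew-installed libraries without hardcoded paths or CMAKE_ARGS.
os.environ["CMAKE_PREFIX_PATH"] = homebrew_prefix()
```

Deriving the prefix from brew --prefix itself is what removes the need for hardcoded per-architecture paths: CMake then finds Homebrew packages through CMAKE_PREFIX_PATH on both Intel and Apple Silicon.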

Python 3.9 compatibility:
- Set ruff target-version to py39 to match project requirements
- Replace str | None with Union[str, None] in type annotations (illustrated below)
- Add Union imports where needed
- Fix core interface, CLI, chat, and embedding server files
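
For context on the annotation change: PEP 604's X | Y union syntax requires Python 3.10, and annotations in a def signature are evaluated at definition time, so str | None raises a TypeError on 3.9. A minimal before/after illustration (the describe function is hypothetical):

```python
from typing import Union


# 3.10+ only; on Python 3.9 this def line itself raises
# TypeError: unsupported operand type(s) for |: 'type' and 'NoneType'
# def describe(name: str | None = None) -> str: ...


# 3.9-compatible spelling (equivalent to Optional[str]):
def describe(name: Union[str, None] = None) -> str:
    return f"hello, {name or 'world'}"


print(describe())         # hello, world
print(describe("LEANN"))  # hello, LEANN
```

With target-version = "py39" in the ruff config, the linter should also stop suggesting the 3.10-only spelling.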

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
Author: Andy Lee
Date: 2025-08-09 17:27:00 -07:00
Parent: 5f5b97fb54
Commit: 4a5db385f0
6 changed files with 27 additions and 31 deletions

File: embedding server

@@ -10,6 +10,7 @@ import sys
 import threading
 import time
 from pathlib import Path
+from typing import Union
 
 import msgpack
 import numpy as np
@@ -33,7 +34,7 @@ if not logger.handlers:
 def create_hnsw_embedding_server(
-    passages_file: str | None = None,
+    passages_file: Union[str, None] = None,
     zmq_port: int = 5555,
     model_name: str = "sentence-transformers/all-mpnet-base-v2",
     distance_metric: str = "mips",

File: chat / LLM interface

@@ -8,7 +8,7 @@ import difflib
 import logging
 import os
 from abc import ABC, abstractmethod
-from typing import Any
+from typing import Any, Union
 
 import torch
@@ -309,7 +309,7 @@ def search_hf_models(query: str, limit: int = 10) -> list[str]:
     return search_hf_models_fuzzy(query, limit)
 
-def validate_model_and_suggest(model_name: str, llm_type: str) -> str | None:
+def validate_model_and_suggest(model_name: str, llm_type: str) -> Union[str, None]:
     """Validate model name and provide suggestions if invalid"""
     if llm_type == "ollama":
         available_models = check_ollama_models()
@@ -683,7 +683,7 @@ class HFChat(LLMInterface):
 class OpenAIChat(LLMInterface):
     """LLM interface for OpenAI models."""
 
-    def __init__(self, model: str = "gpt-4o", api_key: str | None = None):
+    def __init__(self, model: str = "gpt-4o", api_key: Union[str, None] = None):
         self.model = model
         self.api_key = api_key or os.getenv("OPENAI_API_KEY")

File: CLI

@@ -1,6 +1,7 @@
 import argparse
 import asyncio
 from pathlib import Path
+from typing import Union
 
 from llama_index.core import SimpleDirectoryReader
 from llama_index.core.node_parser import SentenceSplitter
@@ -270,7 +271,7 @@ Examples:
         print(f' leann search {example_name} "your query"')
         print(f" leann ask {example_name} --interactive")
 
-    def load_documents(self, docs_dir: str, custom_file_types: str | None = None):
+    def load_documents(self, docs_dir: str, custom_file_types: Union[str, None] = None):
         print(f"Loading documents from {docs_dir}...")
         if custom_file_types:
             print(f"Using custom file types: {custom_file_types}")

File: core interface

@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from typing import Any, Literal
+from typing import Any, Literal, Union
 
 import numpy as np
@@ -34,7 +34,9 @@ class LeannBackendSearcherInterface(ABC):
         pass
 
     @abstractmethod
-    def _ensure_server_running(self, passages_source_file: str, port: int | None, **kwargs) -> int:
+    def _ensure_server_running(
+        self, passages_source_file: str, port: Union[int, None], **kwargs
+    ) -> int:
         """Ensure server is running"""
         pass
@@ -48,7 +50,7 @@ class LeannBackendSearcherInterface(ABC):
         prune_ratio: float = 0.0,
         recompute_embeddings: bool = False,
         pruning_strategy: Literal["global", "local", "proportional"] = "global",
-        zmq_port: int | None = None,
+        zmq_port: Union[int, None] = None,
         **kwargs,
     ) -> dict[str, Any]:
         """Search for nearest neighbors
@@ -74,7 +76,7 @@ class LeannBackendSearcherInterface(ABC):
         self,
         query: str,
         use_server_if_available: bool = True,
-        zmq_port: int | None = None,
+        zmq_port: Union[int, None] = None,
     ) -> np.ndarray:
         """Compute embedding for a query string