From 65bbff1d93b8d68124ad4bbe056ccc5ec0f7531f Mon Sep 17 00:00:00 2001
From: Andy Lee
Date: Thu, 7 Aug 2025 15:01:09 -0700
Subject: [PATCH] fix(py39): replace union type syntax in chat.py

- typing import: Any -> Any, Optional
- validate_model_and_suggest: str | None -> Optional[str]
- OpenAIChat.__init__: api_key: str | None -> Optional[str]
- get_llm: dict[str, Any] | None -> Optional[dict[str, Any]]

Ensures Python 3.9 compatibility for CI macOS 3.9.
---
 packages/leann-core/src/leann/chat.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/packages/leann-core/src/leann/chat.py b/packages/leann-core/src/leann/chat.py
index 2d69bec..541da07 100644
--- a/packages/leann-core/src/leann/chat.py
+++ b/packages/leann-core/src/leann/chat.py
@@ -8,7 +8,7 @@
 import difflib
 import logging
 import os
 from abc import ABC, abstractmethod
-from typing import Any
+from typing import Any, Optional
 
 import torch
@@ -309,7 +309,7 @@ def search_hf_models(query: str, limit: int = 10) -> list[str]:
     return search_hf_models_fuzzy(query, limit)
 
 
-def validate_model_and_suggest(model_name: str, llm_type: str) -> str | None:
+def validate_model_and_suggest(model_name: str, llm_type: str) -> Optional[str]:
     """Validate model name and provide suggestions if invalid"""
     if llm_type == "ollama":
         available_models = check_ollama_models()
@@ -683,7 +683,7 @@ class HFChat(LLMInterface):
 class OpenAIChat(LLMInterface):
     """LLM interface for OpenAI models."""
 
-    def __init__(self, model: str = "gpt-4o", api_key: str | None = None):
+    def __init__(self, model: str = "gpt-4o", api_key: Optional[str] = None):
         self.model = model
         self.api_key = api_key or os.getenv("OPENAI_API_KEY")
@@ -759,7 +759,7 @@ class SimulatedChat(LLMInterface):
         return "This is a simulated answer from the LLM based on the retrieved context."
 
 
-def get_llm(llm_config: dict[str, Any] | None = None) -> LLMInterface:
+def get_llm(llm_config: Optional[dict[str, Any]] = None) -> LLMInterface:
     """
     Factory function to get an LLM interface based on configuration.