From 9f2e82a838bc73d43bba1b0279e7b73b059c6250 Mon Sep 17 00:00:00 2001 From: joshuashaffer Date: Fri, 8 Aug 2025 18:31:15 -0400 Subject: [PATCH] Propagate hosts argument for ollama through chat.py (#21) * Propagate hosts argument for ollama through chat.py * Apply suggestions from code review Good AI slop suggestions. Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- packages/leann-core/src/leann/chat.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/leann-core/src/leann/chat.py b/packages/leann-core/src/leann/chat.py index 2d69bec..3a5acb1 100644 --- a/packages/leann-core/src/leann/chat.py +++ b/packages/leann-core/src/leann/chat.py @@ -17,12 +17,12 @@ logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) -def check_ollama_models() -> list[str]: +def check_ollama_models(host: str) -> list[str]: """Check available Ollama models and return a list""" try: import requests - response = requests.get("http://localhost:11434/api/tags", timeout=5) + response = requests.get(f"{host}/api/tags", timeout=5) if response.status_code == 200: data = response.json() return [model["name"] for model in data.get("models", [])] @@ -309,10 +309,12 @@ def search_hf_models(query: str, limit: int = 10) -> list[str]: return search_hf_models_fuzzy(query, limit) -def validate_model_and_suggest(model_name: str, llm_type: str) -> str | None: +def validate_model_and_suggest( + model_name: str, llm_type: str, host: str = "http://localhost:11434" +) -> str | None: """Validate model name and provide suggestions if invalid""" if llm_type == "ollama": - available_models = check_ollama_models() + available_models = check_ollama_models(host) if available_models and model_name not in available_models: error_msg = f"Model '{model_name}' not found in your local Ollama installation."
@@ -469,7 +471,7 @@ class OllamaChat(LLMInterface): requests.get(host) # Pre-check model availability with helpful suggestions - model_error = validate_model_and_suggest(model, "ollama") + model_error = validate_model_and_suggest(model, "ollama", host) if model_error: raise ValueError(model_error)