From 6217bb5638c3e01d37da2f627000851db3d89fda Mon Sep 17 00:00:00 2001
From: yichuan520030910320
Date: Tue, 22 Jul 2025 22:05:28 -0700
Subject: [PATCH] fix readme

---
 README.md | 25 +++++++++++++++++--------
 1 file changed, 17 insertions(+), 8 deletions(-)

diff --git a/README.md b/README.md
index 88b9482..95bc2bc 100755
--- a/README.md
+++ b/README.md
@@ -103,19 +103,28 @@ Just 3 lines of code. Our declarative API makes RAG as easy as writing a config
 ```python
 from leann.api import LeannBuilder, LeannSearcher, LeannChat
-# 1. Build index (no embeddings stored!)
+
+# 1. Build the index (no embeddings stored!)
 builder = LeannBuilder(backend_name="hnsw")
-builder.add_text("C# is a powerful programming language ")
+builder.add_text("C# is a powerful programming language")
 builder.add_text("Python is a powerful programming language and it is very popular")
-builder.add_text("Machine learning transforms industries") 
+builder.add_text("Machine learning transforms industries")
 builder.add_text("Neural networks process complex data")
-builder.add_text("Leann is a great storage saving engine for RAG on your macbook")
+builder.add_text("Leann is a great storage saving engine for RAG on your MacBook")
 builder.build_index("knowledge.leann")
+
 # 2. Search with real-time embeddings
-results = LeannSearcher("knowledge.leann").search("programming languages", top_k=2)
-# 3. Chat with LEANN
-llm_config={"type": "ollama", "model": "llama3.2:1b"}
-response = LeannChat(index_path="knowledge.leann",llm_config=llm_config ).ask(
+searcher = LeannSearcher("knowledge.leann")
+results = searcher.search("programming languages", top_k=2)
+
+# 3. Chat with LEANN using retrieved results
+llm_config = {
+    "type": "ollama",
+    "model": "llama3.2:1b"
+}
+
+chat = LeannChat(index_path="knowledge.leann", llm_config=llm_config)
+response = chat.ask(
     "Compare the two retrieved programming languages and say which one is more popular today.",
     top_k=2,
 )
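
A minimal sketch of how the updated README snippet might be exercised end to end, for anyone sanity-checking this patch locally. It reuses only the calls that appear in the diff (LeannBuilder, LeannSearcher, LeannChat); the assumption that the `leann` package is installed and that a local Ollama server has the `llama3.2:1b` model pulled is mine, not part of the patch, and since the shapes of `results` and `response` are not specified here they are simply printed.

```python
# Sanity-check sketch for the refactored README example.
# Assumptions (not from the patch): `leann` is installed and a local Ollama
# server with the llama3.2:1b model is running.
from leann.api import LeannBuilder, LeannSearcher, LeannChat

# Build a small index; per the README, embeddings are not stored in it.
builder = LeannBuilder(backend_name="hnsw")
builder.add_text("C# is a powerful programming language")
builder.add_text("Python is a powerful programming language and it is very popular")
builder.build_index("knowledge.leann")

# Search with embeddings computed at query time; print whatever the backend returns.
results = LeannSearcher("knowledge.leann").search("programming languages", top_k=2)
print(results)

# Ask the LLM over the same index and print the raw response.
chat = LeannChat(
    index_path="knowledge.leann",
    llm_config={"type": "ollama", "model": "llama3.2:1b"},
)
print(chat.ask("Which of the two languages is more popular today?", top_k=2))
```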