update readme and main example

yichuan520030910320
2025-07-17 15:03:22 -07:00
parent 0db81c16cd
commit 90d9f27383
4 changed files with 9471 additions and 338 deletions


@@ -85,6 +85,8 @@ uv sync
 **Ollama Setup (Optional for Local LLM):**
 *macOS:*
+First, [download Ollama for macOS](https://ollama.com/download/mac).
 ```bash
 # Install Ollama
 brew install ollama

demo.ipynb — 9790 changed lines — file diff suppressed because it is too large.


Binary file not shown.


@@ -10,7 +10,7 @@ from pathlib import Path
 dotenv.load_dotenv()
 node_parser = SentenceSplitter(
-    chunk_size=256, chunk_overlap=64, separator=" ", paragraph_separator="\n\n"
+    chunk_size=256, chunk_overlap=128, separator=" ", paragraph_separator="\n\n"
 )
 print("Loading documents...")
 documents = SimpleDirectoryReader(
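
For context, a minimal sketch (not part of the commit) of what the larger `chunk_overlap` does, assuming the llama_index `SentenceSplitter` the example appears to use; the sample text below is made up:

```python
# Sketch only: illustrates the updated splitter settings; llama_index import path assumed.
from llama_index.core.node_parser import SentenceSplitter

node_parser = SentenceSplitter(
    chunk_size=256,      # max tokens per chunk (unchanged)
    chunk_overlap=128,   # raised from 64: neighbouring chunks now share up to 128 tokens
    separator=" ",
    paragraph_separator="\n\n",
)

sample_text = "LEANN builds a compact vector index over local documents. " * 200
chunks = node_parser.split_text(sample_text)  # plain-text split; the example feeds documents instead
print(f"{len(chunks)} chunks; first chunk ends with: ...{chunks[0][-60:]}")
```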
@@ -58,17 +58,16 @@ async def main(args):
     print(f"\n[PHASE 2] Starting Leann chat session...")
-    llm_config = {"type": "hf", "model": "Qwen/Qwen3-4B"}
+    # llm_config = {"type": "hf", "model": "Qwen/Qwen3-4B"}
     llm_config = {"type": "ollama", "model": "qwen3:8b"}
     chat = LeannChat(index_path=INDEX_PATH, llm_config=llm_config)
     query = "Based on the paper, what are the main techniques LEANN explores to reduce the storage overhead and DLPM explore to achieve Fairness and Efiiciency trade-off?"
     query = (
         "What is the main idea of RL and give me 5 exapmle of classic RL algorithms?"
     )
-    query = (
-        "什么是盘古大模型以及盘古开发过程中遇到了什么阴暗面,任务令一般在什么城市颁发"
-    )
+    # query = (
+    #     "什么是盘古大模型以及盘古开发过程中遇到了什么阴暗面,任务令一般在什么城市颁发"
+    # )
     print(f"You: {query}")
     chat_response = chat.ask(
@@ -103,7 +102,7 @@ if __name__ == "__main__":
     parser.add_argument(
         "--index-dir",
         type=str,
-        default="./test_pdf_index_pangu_test",
+        default="./test_doc_files",
         help="Directory where the Leann index will be stored.",
     )
     args = parser.parse_args()
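
For reference, a minimal end-to-end sketch (not part of the commit) of how the updated example drives a chat session over the index with the Ollama backend. Only `LeannChat(index_path=..., llm_config=...)` and `chat.ask(...)` come from the diff above; the import path, the `INDEX_PATH` value, and the shell prerequisites are assumptions:

```python
# Sketch only: exact import path and index file name may differ in the repo.
# Prerequisites (run in a shell): `ollama serve` and `ollama pull qwen3:8b`.
from leann import LeannChat  # assumed import path

INDEX_PATH = "./test_doc_files/leann.index"  # hypothetical file under the new --index-dir default

llm_config = {"type": "ollama", "model": "qwen3:8b"}
chat = LeannChat(index_path=INDEX_PATH, llm_config=llm_config)

query = "What is the main idea of RL and give me 5 examples of classic RL algorithms?"
print(f"You: {query}")
chat_response = chat.ask(query)
print(f"Leann: {chat_response}")
```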