{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Quick Start in 30s"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Build the index"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Writing passages: 100%|██████████| 5/5 [00:00<00:00, 8106.50chunk/s]\n",
"Batches: 100%|██████████| 1/1 [00:00<00:00, 32.12it/s]\n",
"WARNING:leann_backend_hnsw.hnsw_backend:Converting data to float32, shape: (5, 768)\n",
"INFO:leann_backend_hnsw.hnsw_backend:INFO: Converting HNSW index to CSR-pruned format...\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"M: 64 for level: 0\n",
"Starting conversion: knowledge.index -> knowledge.csr.tmp\n",
"[0.00s] Reading Index HNSW header...\n",
"[0.00s] Header read: d=768, ntotal=5\n",
"[0.00s] Reading HNSW struct vectors...\n",
" Reading vector (dtype=<class 'numpy.float64'>, fmt='d')... Count=6, Bytes=48\n",
"[0.00s] Read assign_probas (6)\n",
" Reading vector (dtype=<class 'numpy.int32'>, fmt='i')... Count=7, Bytes=28\n",
"[0.17s] Read cum_nneighbor_per_level (7)\n",
" Reading vector (dtype=<class 'numpy.int32'>, fmt='i')... Count=5, Bytes=20\n",
"[0.26s] Read levels (5)\n",
"[0.35s] Probing for compact storage flag...\n",
"[0.35s] Found compact flag: False\n",
"[0.35s] Compact flag is False, reading original format...\n",
"[0.35s] Probing for potential extra byte before non-compact offsets...\n",
"[0.35s] Found and consumed an unexpected 0x00 byte.\n",
" Reading vector (dtype=<class 'numpy.uint64'>, fmt='Q')... Count=6, Bytes=48\n",
"[0.35s] Read offsets (6)\n",
"[0.43s] Attempting to read neighbors vector...\n",
" Reading vector (dtype=<class 'numpy.int32'>, fmt='i')... Count=320, Bytes=1280\n",
"[0.43s] Read neighbors (320)\n",
"[0.55s] Read scalar params (ep=4, max_lvl=0)\n",
"[0.55s] Checking for storage data...\n",
"[0.55s] Found storage fourcc: 49467849.\n",
"[0.55s] Converting to CSR format...\n",
"[0.55s] Conversion loop finished. \n",
"[0.55s] Running validation checks...\n",
" Checking total valid neighbor count...\n",
" OK: Total valid neighbors = 20\n",
" Checking final pointer indices...\n",
" OK: Final pointers match data size.\n",
"[0.55s] Deleting original neighbors and offsets arrays...\n",
" CSR Stats: |data|=20, |level_ptr|=10\n",
"[0.64s] Writing CSR HNSW graph data in FAISS-compatible order...\n",
" Pruning embeddings: Writing NULL storage marker.\n",
"[0.73s] Conversion complete.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:leann_backend_hnsw.hnsw_backend:✅ CSR conversion successful.\n",
"INFO:leann_backend_hnsw.hnsw_backend:INFO: Replaced original index with CSR-pruned version at 'knowledge.index'\n"
]
}
],
"source": [
"from leann.api import LeannBuilder\n",
"\n",
"builder = LeannBuilder(backend_name=\"hnsw\")\n",
"builder.add_text(\"C# is a powerful programming language\")\n",
"builder.add_text(\"Python is a powerful programming language and it is very popular\")\n",
"builder.add_text(\"Machine learning transforms industries\")\n",
"builder.add_text(\"Neural networks process complex data\")\n",
"builder.add_text(\"Leann is a great storage saving engine for RAG on your MacBook\")\n",
"builder.build_index(\"knowledge.leann\")"
]
},
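{
"cell_type": "markdown",
"metadata": {},
"source": [
"Building writes the index artifacts next to the chosen path. The next cell is a small sanity check (not part of the original demo) that lists them; the names `knowledge.index` and `knowledge.leann.meta.json` appear in the log output above, and the exact set of files may differ across LEANN versions."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sanity check (assumption: the artifacts are written to the current\n",
"# directory, as the build log above suggests).\n",
"from pathlib import Path\n",
"\n",
"sorted(p.name for p in Path(\".\").glob(\"knowledge*\"))"
]
},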
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Search with real-time embeddings"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:leann.embedding_server_manager:Terminating server process (PID: 76679) for backend leann_backend_hnsw.hnsw_embedding_server...\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"[read_HNSW - CSR NL v4] Reading metadata & CSR indices (manual offset)...\n",
"[read_HNSW NL v4] Read levels vector, size: 5\n",
"[read_HNSW NL v4] Reading Compact Storage format indices...\n",
"[read_HNSW NL v4] Read compact_level_ptr, size: 10\n",
"[read_HNSW NL v4] Read compact_node_offsets, size: 6\n",
"[read_HNSW NL v4] Read entry_point: 4, max_level: 0\n",
"[read_HNSW NL v4] Read storage fourcc: 0x6c6c756e\n",
"[read_HNSW NL v4 FIX] Detected FileIOReader. Neighbors size field offset: 326\n",
"[read_HNSW NL v4] Reading neighbors data into memory.\n",
"[read_HNSW NL v4] Read neighbors data, size: 20\n",
"[read_HNSW NL v4] Finished reading metadata and CSR indices.\n",
"INFO: Skipping external storage loading, since is_recompute is true.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:leann.embedding_server_manager:Server process 76679 terminated.\n",
"INFO:leann.api:🔍 LeannSearcher.search() called:\n",
"INFO:leann.api: Query: 'programming languages'\n",
"INFO:leann.api: Top_k: 2\n",
"INFO:leann.api: Additional kwargs: {}\n",
"INFO:leann.embedding_server_manager:Starting embedding server on port 5557...\n",
"INFO:leann.embedding_server_manager:Command: /Users/andyl/Projects/LEANN-RAG/.venv/bin/python -m leann_backend_hnsw.hnsw_embedding_server --zmq-port 5557 --model-name facebook/contriever-msmarco --passages-file knowledge.leann.meta.json\n",
"huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n",
"To disable this warning, you can either:\n",
"\t- Avoid using `tokenizers` before the fork if possible\n",
"\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n",
"INFO:leann.embedding_server_manager:Server process started with PID: 93303\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"INFO: Registering backend 'diskann'\n",
"INFO: Registering backend 'hnsw'\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:leann.embedding_server_manager:Embedding server is ready!\n",
"INFO:leann.api: Launching server time: 1.067044973373413 seconds\n",
"INFO:leann.embedding_server_manager:Existing server process (PID 93303) is compatible\n",
"INFO:datasets:PyTorch version 2.7.1 available.\n",
"INFO:sentence_transformers.SentenceTransformer:Load pretrained SentenceTransformer: facebook/contriever-msmarco\n",
"WARNING:sentence_transformers.SentenceTransformer:No sentence-transformers model found with name facebook/contriever-msmarco. Creating a new one with mean pooling.\n",
"INFO:leann.api: Generated embedding shape: (1, 768)\n",
"INFO:leann.api: Embedding time: 3.6515679359436035 seconds\n",
"INFO:leann.api: Search time: 0.12426590919494629 seconds\n",
"INFO:leann.api: Backend returned: labels=2 results\n",
"INFO:leann.api: Processing 2 passage IDs:\n",
"INFO:leann.api: 1. passage_id='0' -> SUCCESS: C# is a powerful programming language...\n",
"INFO:leann.api: 2. passage_id='1' -> SUCCESS: Python is a powerful programming language and it is very popular...\n",
"INFO:leann.api: Final enriched results: 2 passages\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"ZmqDistanceComputer initialized: d=768, metric=0\n"
]
},
{
"data": {
"text/plain": [
"[SearchResult(id='0', score=np.float32(1.444752), text='C# is a powerful programming language', metadata={}),\n",
" SearchResult(id='1', score=np.float32(1.394647), text='Python is a powerful programming language and it is very popular', metadata={})]"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from leann.api import LeannSearcher\n",
"\n",
"searcher = LeannSearcher(\"knowledge.leann\")\n",
"results = searcher.search(\"programming languages\", top_k=2)\n",
"results"
]
},
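{
"cell_type": "markdown",
"metadata": {},
"source": [
"The searcher returns a list of `SearchResult` objects. The cell below is just a formatting sketch (not part of the original demo); the `id`, `score`, and `text` attributes it uses match the repr shown in the output above."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Pretty-print the results from the previous cell. Attribute names (id,\n",
"# score, text) are taken from the SearchResult repr shown above.\n",
"for r in results:\n",
"    print(f\"[{r.id}] score={float(r.score):.4f}  {r.text}\")"
]
},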
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Chat with LEANN using retrieved results"
]
},
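{
"cell_type": "markdown",
"metadata": {},
"source": [
"The log output of the `LeannChat` call below shows that it first runs the same top-k search over the index and then sends the retrieved passages, together with the question, to the configured LLM (a local Ollama model). The sketch in the next cell (not part of the original demo) reproduces just that retrieval step with the `LeannSearcher` API from the previous section."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch of the retrieval step that LeannChat performs internally,\n",
"# reusing the searcher from the previous section.\n",
"question = \"Compare the two retrieved programming languages and say which one is more popular today.\"\n",
"[r.text for r in searcher.search(question, top_k=2)]"
]
},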
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:leann.chat:Attempting to create LLM of type='ollama' with model='llama3.2:1b'\n",
"INFO:leann.chat:Initializing OllamaChat with model='llama3.2:1b' and host='http://localhost:11434'\n",
"INFO:leann.api:🔍 LeannSearcher.search() called:\n",
"INFO:leann.api: Query: 'Compare the two retrieved programming languages and say which one is more popular today.'\n",
"INFO:leann.api: Top_k: 2\n",
"INFO:leann.api: Additional kwargs: {}\n",
"INFO:leann.embedding_server_manager:Found compatible server on port 5557\n",
"INFO:leann.embedding_server_manager:Using existing compatible server on port 5557\n",
"INFO:leann.api: Launching server time: 0.012424945831298828 seconds\n",
"INFO:leann.embedding_server_manager:Found compatible server on port 5557\n",
"INFO:leann.embedding_server_manager:Using existing compatible server on port 5557\n",
"INFO:leann.api: Generated embedding shape: (1, 768)\n",
"INFO:leann.api: Embedding time: 0.0868520736694336 seconds\n",
"INFO:leann.api: Search time: 0.03928709030151367 seconds\n",
"INFO:leann.api: Backend returned: labels=2 results\n",
"INFO:leann.api: Processing 2 passage IDs:\n",
"INFO:leann.api: 1. passage_id='1' -> SUCCESS: Python is a powerful programming language and it is very popular...\n",
"INFO:leann.api: 2. passage_id='0' -> SUCCESS: C# is a powerful programming language...\n",
"INFO:leann.api: Final enriched results: 2 passages\n",
"INFO:leann.chat:Sending request to Ollama and waiting for response...\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"[read_HNSW - CSR NL v4] Reading metadata & CSR indices (manual offset)...\n",
"[read_HNSW NL v4] Read levels vector, size: 5\n",
"[read_HNSW NL v4] Reading Compact Storage format indices...\n",
"[read_HNSW NL v4] Read compact_level_ptr, size: 10\n",
"[read_HNSW NL v4] Read compact_node_offsets, size: 6\n",
"[read_HNSW NL v4] Read entry_point: 4, max_level: 0\n",
"[read_HNSW NL v4] Read storage fourcc: 0x6c6c756e\n",
"[read_HNSW NL v4 FIX] Detected FileIOReader. Neighbors size field offset: 326\n",
"[read_HNSW NL v4] Reading neighbors data into memory.\n",
"[read_HNSW NL v4] Read neighbors data, size: 20\n",
"[read_HNSW NL v4] Finished reading metadata and CSR indices.\n",
"INFO: Skipping external storage loading, since is_recompute is true.\n",
"ZmqDistanceComputer initialized: d=768, metric=0\n"
]
},
{
"data": {
"text/plain": [
"\"Based on the information provided, I would compare Python and C# as follows:\\n\\nPython has gained significant popularity over the years due to its simplicity, readability, and extensive libraries. It's often used for web development, data analysis, machine learning, and more.\\n\\nC#, on the other hand, is widely used in Windows-based systems, Android app development, and .NET Core applications. However, its popularity has waxed and waned over time, and it no longer holds as much ground as Python in terms of overall usage.\\n\\nConsidering the current landscape, I would say that C# is still more popular than Python today. Although Python's ecosystem is vast and powerful, C#'s widespread adoption in Windows and .NET development suggests a stronger market presence. Additionally, C# has been the preferred language for many Microsoft products and services over the years.\\n\\nThat being said, Python remains an extremely popular language with a large and active community, making it a great choice for various projects and applications. Its ease of use and flexibility have contributed to its continued popularity.\""
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from leann.api import LeannChat\n",
"\n",
"llm_config = {\n",
" \"type\": \"ollama\",\n",
" \"model\": \"llama3.2:1b\"\n",
"}\n",
"\n",
"chat = LeannChat(index_path=\"knowledge.leann\", llm_config=llm_config)\n",
"response = chat.ask(\n",
" \"Compare the two retrieved programming languages and say which one is more popular today.\",\n",
" top_k=2,\n",
")\n",
"response"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.11"
}
},
"nbformat": 4,
"nbformat_minor": 2
}