{ "cells": [ { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "INFO: LeannBuilder initialized with 'diskann' backend.\n", "INFO: Computing embeddings for 6 chunks using 'sentence-transformers/all-mpnet-base-v2'...\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "Batches: 100%|██████████| 1/1 [00:00<00:00, 77.61it/s]" ] }, { "name": "stdout", "output_type": "stream", "text": [ "INFO: Building DiskANN index for 6 vectors with metric Metric.INNER_PRODUCT...\n", "Using Inner Product search, so need to pre-process base data into temp file. Please ensure there is additional (n*(d+1)*4) bytes for storing pre-processed base vectors, apart from the interim indices created by DiskANN and the final index.\n", "Pre-processing base file by adding extra coordinate\n", "✅ DiskANN index built successfully at 'knowledge'\n", "Writing bin: knowledge_disk.index_max_base_norm.bin\n", "bin: #pts = 1, #dims = 1, size = 12B\n", "Finished writing bin.\n", "Time for preprocessing data for inner product: 0.000165 seconds\n", "Reading max_norm_of_base from knowledge_disk.index_max_base_norm.bin\n", "Reading bin file knowledge_disk.index_max_base_norm.bin ...\n", "Opening bin file knowledge_disk.index_max_base_norm.bin... \n", "Metadata: #pts = 1, #dims = 1...\n", "done.\n", "max_norm_of_base: 1\n", "! Using prepped_base file at knowledge_prepped_base.bin\n", "Starting index build: R=32 L=64 Query RAM budget: 4.02653e+09 Indexing ram budget: 8 T: 8\n", "getting bin metadata\n", "Time for getting bin metadata: 0.000008 seconds\n", "Compressing 769-dimensional data into 512 bytes per vector.\n", "Opened: knowledge_prepped_base.bin, size: 18464, cache_size: 18464\n", "Training data with 6 samples loaded.\n", "Reading bin file knowledge_pq_pivots.bin ...\n", "Opening bin file knowledge_pq_pivots.bin... \n", "Metadata: #pts = 256, #dims = 769...\n", "done.\n", "PQ pivot file exists. Not generating again\n", "Opened: knowledge_prepped_base.bin, size: 18464, cache_size: 18464\n", "Reading bin file knowledge_pq_pivots.bin ...\n", "Opening bin file knowledge_pq_pivots.bin... \n", "Metadata: #pts = 4, #dims = 1...\n", "done.\n", "Reading bin file knowledge_pq_pivots.bin ...\n", "Opening bin file knowledge_pq_pivots.bin... \n", "Metadata: #pts = 256, #dims = 769...\n", "done.\n", "Reading bin file knowledge_pq_pivots.bin ...\n", "Opening bin file knowledge_pq_pivots.bin... \n", "Metadata: #pts = 769, #dims = 1...\n", "done.\n", "Reading bin file knowledge_pq_pivots.bin ...\n", "Opening bin file knowledge_pq_pivots.bin... \n", "Metadata: #pts = 513, #dims = 1...\n", "done.\n", "Loaded PQ pivot information\n", "Processing points [0, 6)...done.\n", "Time for generating quantized data: 0.023918 seconds\n", "Full index fits in RAM budget, should consume at most 2.03973e-05GiBs, so building in one shot\n", "L2: Using AVX2 distance computation DistanceL2Float\n", "Passed, empty search_params while creating index config\n", "Using only first 6 from file.. \n", "Starting index build with 6 points... \n", "0% of index build completed.Starting final cleanup..done. 
Link time: 9e-05s\n", "Index built with degree: max:5 avg:5 min:5 count(deg<2):0\n", "Not saving tags as they are not enabled.\n", "Time taken for save: 0.000178s.\n", "Time for building merged vamana index: 0.000579 seconds\n", "Opened: knowledge_prepped_base.bin, size: 18464, cache_size: 18464\n", "Vamana index file size=168\n", "Opened: knowledge_disk.index, cache_size: 67108864\n", "medoid: 0B\n", "max_node_len: 3100B\n", "nnodes_per_sector: 1B\n", "# sectors: 6\n", "Sector #0written\n", "Finished writing 28672B\n", "Writing bin: knowledge_disk.index\n", "bin: #pts = 9, #dims = 1, size = 80B\n", "Finished writing bin.\n", "Output disk index file written to knowledge_disk.index\n", "Finished writing 28672B\n", "Time for generating disk layout: 0.043488 seconds\n", "Opened: knowledge_prepped_base.bin, size: 18464, cache_size: 18464\n", "Loading base knowledge_prepped_base.bin. #points: 6. #dim: 769.\n", "Wrote 1 points to sample file: knowledge_sample_data.bin\n", "Indexing time: 0.0684344\n", "INFO: Leann metadata saved to knowledge.leann.meta.json\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "\n", "Opened file : knowledge_disk.index\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Since data is floating point, we assume that it has been appropriately pre-processed (normalization for cosine, and convert-to-l2 by adding extra dimension for MIPS). So we shall invoke an l2 distance function.\n", "L2: Using AVX2 distance computation DistanceL2Float\n", "L2: Using AVX2 distance computation DistanceL2Float\n", "Before index load\n", "✅ DiskANN index loaded successfully.\n", "INFO: LeannSearcher initialized with 'diskann' backend using index 'knowledge.leann'.\n", "Reading bin file knowledge_pq_compressed.bin ...\n", "Opening bin file knowledge_pq_compressed.bin... \n", "Metadata: #pts = 6, #dims = 512...\n", "done.\n", "Reading bin file knowledge_pq_pivots.bin ...\n", "Opening bin file knowledge_pq_pivots.bin... \n", "Metadata: #pts = 4, #dims = 1...\n", "done.\n", "Offsets: 4096 791560 794644 796704\n", "Reading bin file knowledge_pq_pivots.bin ...\n", "Opening bin file knowledge_pq_pivots.bin... \n", "Metadata: #pts = 256, #dims = 769...\n", "done.\n", "Reading bin file knowledge_pq_pivots.bin ...\n", "Opening bin file knowledge_pq_pivots.bin... \n", "Metadata: #pts = 769, #dims = 1...\n", "done.\n", "Reading bin file knowledge_pq_pivots.bin ...\n", "Opening bin file knowledge_pq_pivots.bin... \n", "Metadata: #pts = 513, #dims = 1...\n", "done.\n", "Loaded PQ Pivots: #ctrs: 256, #dims: 769, #chunks: 512\n", "Loaded PQ centroids and in-memory compressed vectors. 
      "Loaded PQ centroids and in-memory compressed vectors. #points: 6 #dim: 769 #aligned_dim: 776 #chunks: 512\n",
      "Loading index metadata from knowledge_disk.index\n",
      "Disk-Index File Meta-data: # nodes per sector: 1, max node len (bytes): 3100, max node degree: 5\n",
      "Disk-Index Meta: nodes per sector: 1, max node len: 3100, max node degree: 5\n",
      "Setting up thread-specific contexts for nthreads: 8\n",
      "allocating ctx: 0x78348f4de000 to thread-id:132170359560000\n",
      "allocating ctx: 0x78348f4cd000 to thread-id:132158431693760\n",
      "allocating ctx: 0x78348f4bc000 to thread-id:132158442179392\n",
      "allocating ctx: 0x78348f4ab000 to thread-id:132158421208128\n",
      "allocating ctx: 0x78348f49a000 to thread-id:132158452665024\n",
      "allocating ctx: 0x78348f489000 to thread-id:132158389751232\n",
      "allocating ctx: 0x78348f478000 to thread-id:132158410722496\n",
      "allocating ctx: 0x78348f467000 to thread-id:132158400236864\n",
      "Loading centroid data from medoids vector data of 1 medoid(s)\n",
      "Reading bin file knowledge_disk.index_max_base_norm.bin ...\n",
      "Opening bin file knowledge_disk.index_max_base_norm.bin... \n",
      "Metadata: #pts = 1, #dims = 1...\n",
      "done.\n",
      "Setting re-scaling factor of base vectors to 1\n",
      "load_from_separate_paths done.\n",
      "Reading (with alignment) bin file knowledge_sample_data.bin ...Metadata: #pts = 1, #dims = 769, aligned_dim = 776... allocating aligned memory of 3104 bytes... done. Copying data to mem_aligned buffer... done.\n",
      "reserve ratio: 1\n",
      "Graph traversal completed, hops: 3\n",
      "Loading the cache list into memory....done.\n",
      "After index load\n",
      "Clearing scratch\n",
      "INFO: Computing embeddings for 1 chunks using 'sentence-transformers/all-mpnet-base-v2'...\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Batches: 100%|██████████| 1/1 [00:00<00:00, 92.66it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Score: -0.481 - C++ is a powerful programming language\n",
      "Score: -1.049 - Java is a powerful programming language\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "reserve ratio: 1\n",
      "Graph traversal completed, hops: 3\n"
     ]
    }
   ],
   "source": [
    "from leann.api import LeannBuilder, LeannSearcher\n",
    "import leann_backend_diskann  # imported for its side effect: makes the 'diskann' backend available\n",
    "\n",
    "# 1. Build index (no embeddings stored!)\n",
    "builder = LeannBuilder(backend_name=\"diskann\")\n",
    "builder.add_text(\"Python is a powerful programming language\")\n",
    "builder.add_text(\"Machine learning transforms industries\")\n",
    "builder.add_text(\"Neural networks process complex data\")\n",
    "builder.add_text(\"Java is a powerful programming language\")\n",
    "builder.add_text(\"C++ is a powerful programming language\")\n",
    "builder.add_text(\"C# is a powerful programming language\")\n",
    "builder.build_index(\"knowledge.leann\")\n",
    "\n",
    "# 2. Search with real-time embeddings\n",
    "searcher = LeannSearcher(\"knowledge.leann\")\n",
    "results = searcher.search(\"C++ programming languages\", top_k=2)\n",
    "\n",
    "for result in results:\n",
    "    print(f\"Score: {result['score']:.3f} - {result['text']}\")"
   ]
  },
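  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A few notes on what the cell above did:\n",
    "\n",
    "- The build log reports `Metric.INNER_PRODUCT`: DiskANN reduces maximum-inner-product search to L2 search by appending one extra coordinate to each vector, which is why the 768-dimensional `all-mpnet-base-v2` embeddings appear as 769-dimensional data in the log.\n",
    "- The scores come back negative here, and the closer match carries the higher score (-0.481 for C++ vs. -1.049 for Java on the query `C++ programming languages`), consistent with a distance-derived ranking from that MIPS-to-L2 reduction.\n",
    "\n",
    "The next cell is a minimal, untested sketch that reuses the `searcher` from above with a different query. It relies only on the `LeannSearcher.search(query, top_k=...)` call already demonstrated; the query string is illustrative and not part of the original demo. Run it in the same session so `searcher` is still defined.\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Minimal follow-up sketch: reuse the searcher built in the previous cell.\n",
    "# Assumes 'knowledge.leann' was just built and `searcher` is still in scope;\n",
    "# the query below is illustrative, not part of the original demo.\n",
    "results = searcher.search(\"machine learning with neural networks\", top_k=3)\n",
    "for result in results:\n",
    "    print(f\"Score: {result['score']:.3f} - {result['text']}\")"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": ".venv",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}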