Datastore reproduce (#3)
* fix: diskann zmq port and passages
* feat: auto discovery of packages and fix passage gen for diskann
* docs: embedding pruning
* refactor: passage structure
* feat: reproducible research data, rpj_wiki & dpr
* refactor: chat and base searcher
* feat: chat on mps
@@ -468,16 +468,27 @@ def convert_hnsw_graph_to_csr(input_filename, output_filename, prune_embeddings=
     # --- Write CSR HNSW graph data using unified function ---
     print(f"[{time.time() - start_time:.2f}s] Writing CSR HNSW graph data in FAISS-compatible order...")

-    # Determine storage fourcc based on prune_embeddings
-    output_storage_fourcc = NULL_INDEX_FOURCC if prune_embeddings else (storage_fourcc if 'storage_fourcc' in locals() else NULL_INDEX_FOURCC)
-    storage_data = b''
+    # Determine storage fourcc and data based on prune_embeddings
+    if prune_embeddings:
+        print(f" Pruning embeddings: Writing NULL storage marker.")
+        output_storage_fourcc = NULL_INDEX_FOURCC
+        storage_data = b''
+    else:
+        # Keep embeddings - read and preserve original storage data
+        if storage_fourcc and storage_fourcc != NULL_INDEX_FOURCC:
+            print(f" Preserving embeddings: Reading original storage data...")
+            storage_data = f_in.read()  # Read remaining storage data
+            output_storage_fourcc = storage_fourcc
+            print(f" Read {len(storage_data)} bytes of storage data")
+        else:
+            print(f" No embeddings found in original file (NULL storage)")
+            output_storage_fourcc = NULL_INDEX_FOURCC
+            storage_data = b''

     # Use the unified write function
     write_compact_format(f_out, original_hnsw_data, assign_probas_np, cum_nneighbor_per_level_np,
                          levels_np, compact_level_ptr, compact_node_offsets_np,
-                         compact_neighbors_data, output_storage_fourcc, storage_data if not prune_embeddings else b'')
+                         compact_neighbors_data, output_storage_fourcc, storage_data)

     # Clean up memory
     del assign_probas_np, cum_nneighbor_per_level_np, levels_np
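The `null` FourCC written above is what later lets a reader detect pruned storage without consulting metadata. A minimal probe sketch (assuming the FourCC's byte offset within the CSR file is already known from the layout written above; `fourcc_offset` is a hypothetical parameter):

import struct

NULL_INDEX_FOURCC = int.from_bytes(b'null', 'little')

def storage_is_pruned(index_path, fourcc_offset):
    # Read the 4-byte storage FourCC and compare against the NULL marker.
    with open(index_path, 'rb') as f:
        f.seek(fourcc_offset)
        (fourcc,) = struct.unpack('<I', f.read(4))
    return fourcc == NULL_INDEX_FOURCC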
@@ -1,18 +1,12 @@
 import numpy as np
 import os
 import json
 import struct
 from pathlib import Path
-from typing import Dict, Any
-import contextlib
-import threading
-import time
-import atexit
-import socket
-import subprocess
-import sys
+from typing import Dict, Any, List
+import pickle
+import shutil

-from leann.embedding_server_manager import EmbeddingServerManager
+from leann.searcher_base import BaseSearcher
 from .convert_to_csr import convert_hnsw_graph_to_csr

 from leann.registry import register_backend
@@ -38,349 +32,130 @@ class HNSWBackend(LeannBackendFactoryInterface):
     @staticmethod
     def searcher(index_path: str, **kwargs) -> LeannBackendSearcherInterface:
         path = Path(index_path)
         meta_path = path.parent / f"{path.name}.meta.json"
         if not meta_path.exists():
             raise FileNotFoundError(f"Leann metadata file not found at {meta_path}.")

         with open(meta_path, 'r') as f:
             meta = json.load(f)

         kwargs['meta'] = meta
         return HNSWSearcher(index_path, **kwargs)

 class HNSWBuilder(LeannBackendBuilderInterface):
     def __init__(self, **kwargs):
         self.build_params = kwargs.copy()

         # --- Configuration defaults with standardized names ---
         self.is_compact = self.build_params.setdefault("is_compact", True)
         self.is_recompute = self.build_params.setdefault("is_recompute", True)

         # --- Additional Options ---
         self.is_skip_neighbors = self.build_params.setdefault("is_skip_neighbors", False)
         self.disk_cache_ratio = self.build_params.setdefault("disk_cache_ratio", 0.0)
         self.external_storage_path = self.build_params.get("external_storage_path", None)

         # --- Standard HNSW parameters ---
         self.M = self.build_params.setdefault("M", 32)
         self.efConstruction = self.build_params.setdefault("efConstruction", 200)
         self.distance_metric = self.build_params.setdefault("distance_metric", "mips")
         self.dimensions = self.build_params.get("dimensions")

         if self.is_skip_neighbors and not self.is_compact:
             raise ValueError("is_skip_neighbors can only be used with is_compact=True")

         if self.is_recompute and not self.is_compact:
             raise ValueError("is_recompute requires is_compact=True for efficiency")
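For context, a minimal usage sketch of these builder defaults (illustrative only; the import path `leann_backend_hnsw.hnsw_backend` and the file layout are assumptions):

import numpy as np
from leann_backend_hnsw.hnsw_backend import HNSWBuilder  # hypothetical module path

data = np.random.rand(1000, 768).astype(np.float32)  # one embedding per chunk
ids = [f"doc_{i}" for i in range(1000)]              # one string id per vector

builder = HNSWBuilder(M=32, efConstruction=200, distance_metric="mips",
                      is_compact=True, is_recompute=True)
builder.build(data, ids, "indexes/demo.index")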
-    def build(self, data: np.ndarray, index_path: str, **kwargs):
-        """Build HNSW index using FAISS"""
+    def build(self, data: np.ndarray, ids: List[str], index_path: str, **kwargs):
         from . import faiss

         path = Path(index_path)
         index_dir = path.parent
         index_prefix = path.stem

         index_dir.mkdir(parents=True, exist_ok=True)

         if data.dtype != np.float32:
             data = data.astype(np.float32)
         if not data.flags['C_CONTIGUOUS']:
             data = np.ascontiguousarray(data)

-        metric_str = self.distance_metric.lower()
-        metric_enum = get_metric_map().get(metric_str)
+        label_map = {i: str_id for i, str_id in enumerate(ids)}
+        label_map_file = index_dir / "leann.labels.map"
+        with open(label_map_file, 'wb') as f:
+            pickle.dump(label_map, f)
+
+        metric_enum = get_metric_map().get(self.distance_metric.lower())
         if metric_enum is None:
-            raise ValueError(f"Unsupported distance_metric '{metric_str}'.")
+            raise ValueError(f"Unsupported distance_metric '{self.distance_metric}'.")

-        M = self.M
-        efConstruction = self.efConstruction
-        dim = self.dimensions
-        if not dim:
-            dim = data.shape[1]
+        dim = self.dimensions or data.shape[1]
+        index = faiss.IndexHNSWFlat(dim, self.M, metric_enum)
+        index.hnsw.efConstruction = self.efConstruction

         print(f"INFO: Building HNSW index for {data.shape[0]} vectors with metric {metric_enum}...")

-        try:
-            index = faiss.IndexHNSWFlat(dim, M, metric_enum)
-            index.hnsw.efConstruction = efConstruction
-
-            if metric_str == "cosine":
-                faiss.normalize_L2(data)
-
-            index.add(data.shape[0], faiss.swig_ptr(data))
-
-            index_file = index_dir / f"{index_prefix}.index"
-            faiss.write_index(index, str(index_file))
-
-            print(f"✅ HNSW index built successfully at '{index_file}'")
-
-            if self.is_compact:
-                self._convert_to_csr(index_file)
-
-            if self.is_recompute:
-                self._generate_passages_file(index_dir, index_prefix, **kwargs)
-
-        except Exception as e:
-            print(f"💥 ERROR: HNSW index build failed. Exception: {e}")
-            raise
+        if self.distance_metric.lower() == "cosine":
+            faiss.normalize_L2(data)
+
+        index.add(data.shape[0], faiss.swig_ptr(data))
+        index_file = index_dir / f"{index_prefix}.index"
+        faiss.write_index(index, str(index_file))
+
+        if self.is_compact:
+            self._convert_to_csr(index_file)
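The `leann.labels.map` pickle written in `build` is the bridge back from FAISS's integer labels to the caller's string ids; reading it back is just (sketch, path assumed):

import pickle

with open("indexes/leann.labels.map", 'rb') as f:
    label_map = pickle.load(f)  # {int_label: str_id}, as dumped in build()

print(label_map.get(0, "unknown_0"))  # same fallback convention the searcher uses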
     def _convert_to_csr(self, index_file: Path):
         """Convert built index to CSR format"""
-        try:
-            mode_str = "CSR-pruned" if self.is_recompute else "CSR-standard"
-            print(f"INFO: Converting HNSW index to {mode_str} format...")
-
-            csr_temp_file = index_file.with_suffix(".csr.tmp")
-
-            success = convert_hnsw_graph_to_csr(
-                str(index_file),
-                str(csr_temp_file),
-                prune_embeddings=self.is_recompute
-            )
-
-            if success:
-                print("✅ CSR conversion successful.")
-                import shutil
-                # rename index_file to index_file.old
-                index_file_old = index_file.with_suffix(".old")
-                shutil.move(str(index_file), str(index_file_old))
-                shutil.move(str(csr_temp_file), str(index_file))
-                print(f"INFO: Replaced original index with {mode_str} version at '{index_file}'")
-            else:
-                # Clean up and fail fast
-                if csr_temp_file.exists():
-                    os.remove(csr_temp_file)
-                raise RuntimeError("CSR conversion failed - cannot proceed with compact format")
-
-        except Exception as e:
-            print(f"💥 ERROR: CSR conversion failed. Exception: {e}")
-            raise
+        mode_str = "CSR-pruned" if self.is_recompute else "CSR-standard"
+        print(f"INFO: Converting HNSW index to {mode_str} format...")
+
+        csr_temp_file = index_file.with_suffix(".csr.tmp")
+
+        success = convert_hnsw_graph_to_csr(
+            str(index_file),
+            str(csr_temp_file),
+            prune_embeddings=self.is_recompute
+        )
+
+        if success:
+            print("✅ CSR conversion successful.")
+            index_file_old = index_file.with_suffix(".old")
+            shutil.move(str(index_file), str(index_file_old))
+            shutil.move(str(csr_temp_file), str(index_file))
+            print(f"INFO: Replaced original index with {mode_str} version at '{index_file}'")
+        else:
+            # Clean up and fail fast
+            if csr_temp_file.exists():
+                os.remove(csr_temp_file)
+            raise RuntimeError("CSR conversion failed - cannot proceed with compact format")

-    def _generate_passages_file(self, index_dir: Path, index_prefix: str, **kwargs):
-        """Generate passages file for recompute mode"""
-        try:
-            chunks = kwargs.get('chunks', [])
-            if not chunks:
-                print("INFO: No chunks data provided, skipping passages file generation")
-                return
-
-            # Generate node_id to text mapping
-            passages_data = {}
-            for node_id, chunk in enumerate(chunks):
-                passages_data[str(node_id)] = chunk["text"]
-
-            # Save passages file
-            passages_file = index_dir / f"{index_prefix}.passages.json"
-            with open(passages_file, 'w', encoding='utf-8') as f:
-                json.dump(passages_data, f, ensure_ascii=False, indent=2)
-
-            print(f"✅ Generated passages file for recompute mode at '{passages_file}' ({len(passages_data)} passages)")
-
-        except Exception as e:
-            print(f"💥 ERROR: Failed to generate passages file. Exception: {e}")
-            # Don't raise - this is not critical for index building
-            pass
-
-class HNSWSearcher(LeannBackendSearcherInterface):
-    def _get_index_storage_status(self, index_file: Path) -> tuple[bool, bool]:
-        """
-        Robustly determines the index's storage status by parsing the file.
-
-        Returns:
-            A tuple (is_compact, is_pruned).
-        """
-        if not index_file.exists():
-            return False, False
-
-        with open(index_file, 'rb') as f:
-            try:
-                def read_struct(fmt):
-                    size = struct.calcsize(fmt)
-                    data = f.read(size)
-                    if len(data) != size:
-                        raise EOFError(f"File ended unexpectedly reading struct fmt '{fmt}'.")
-                    return struct.unpack(fmt, data)[0]
-
-                def skip_vector(element_size):
-                    count = read_struct('<Q')
-                    f.seek(count * element_size, 1)
-
-                # 1. Read up to the compact flag
-                read_struct('<I'); read_struct('<i'); read_struct('<q')
-                read_struct('<q'); read_struct('<q'); read_struct('<?')
-                metric_type = read_struct('<i')
-                if metric_type > 1: read_struct('<f')
-                skip_vector(8); skip_vector(4); skip_vector(4)
-
-                # 2. Check if there's a compact flag byte
-                # Try to read the compact flag, but handle both old and new formats
-                pos_before_compact = f.tell()
-                try:
-                    is_compact = read_struct('<?')
-                    print(f"INFO: Detected is_compact flag as: {is_compact}")
-                except (EOFError, struct.error):
-                    # Old format without compact flag - assume non-compact
-                    f.seek(pos_before_compact)
-                    is_compact = False
-                    print(f"INFO: No compact flag found, assuming is_compact=False")
-
-                # 3. Read storage FourCC to determine if pruned
-                is_pruned = False
-                try:
-                    if is_compact:
-                        # For compact, we need to skip pointers and scalars to get to the storage FourCC
-                        skip_vector(8)  # level_ptr
-                        skip_vector(8)  # node_offsets
-                        read_struct('<i'); read_struct('<i'); read_struct('<i')
-                        read_struct('<i'); read_struct('<i')
-                        storage_fourcc = read_struct('<I')
-                    else:
-                        # For non-compact, we need to read the flag probe, then skip offsets and neighbors
-                        pos_before_probe = f.tell()
-                        flag_byte = f.read(1)
-                        if not (flag_byte and flag_byte == b'\x00'):
-                            f.seek(pos_before_probe)
-                        skip_vector(8); skip_vector(4)  # offsets, neighbors
-                        read_struct('<i'); read_struct('<i'); read_struct('<i')
-                        read_struct('<i'); read_struct('<i')
-                        # Now we are at the storage. The entire rest is storage blob.
-                        storage_fourcc = struct.unpack('<I', f.read(4))[0]
-
-                    NULL_INDEX_FOURCC = int.from_bytes(b'null', 'little')
-                    if storage_fourcc == NULL_INDEX_FOURCC:
-                        is_pruned = True
-                except (EOFError, struct.error):
-                    # Cannot determine pruning status, assume not pruned
-                    pass
-
-                print(f"INFO: Detected is_pruned as: {is_pruned}")
-                return is_compact, is_pruned
-
-            except (EOFError, struct.error) as e:
-                print(f"WARNING: Could not parse index file to detect format: {e}. Assuming standard, not pruned.")
-                return False, False
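For reference, the removed `_generate_passages_file` helper produced the flat node-id → text JSON that the old embedding-server loader expected; its shape was (values illustrative):

import json

passages_data = {
    "0": "text of the first chunk...",
    "1": "text of the second chunk...",
}
with open("demo.passages.json", 'w', encoding='utf-8') as f:
    json.dump(passages_data, f, ensure_ascii=False, indent=2)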
+class HNSWSearcher(BaseSearcher):
     def __init__(self, index_path: str, **kwargs):
+        super().__init__(index_path, backend_module_name="leann_backend_hnsw.hnsw_embedding_server", **kwargs)
         from . import faiss
         self.meta = kwargs.get("meta", {})
         if not self.meta:
             raise ValueError("HNSWSearcher requires metadata from .meta.json.")

         self.dimensions = self.meta.get("dimensions")
         if not self.dimensions:
             raise ValueError("Dimensions not found in Leann metadata.")

         self.distance_metric = self.meta.get("distance_metric", "mips").lower()
         metric_enum = get_metric_map().get(self.distance_metric)
         if metric_enum is None:
             raise ValueError(f"Unsupported distance_metric '{self.distance_metric}'.")

         self.embedding_model = self.meta.get("embedding_model")
         if not self.embedding_model:
             print("WARNING: embedding_model not found in meta.json. Recompute will fail if attempted.")
+        self.is_compact, self.is_pruned = (
+            self.meta.get('is_compact', True),
+            self.meta.get('is_pruned', True)
+        )

-        path = Path(index_path)
-        self.index_dir = path.parent
-        self.index_prefix = path.stem
-
-        index_file = self.index_dir / f"{self.index_prefix}.index"
+        index_file = self.index_dir / f"{self.index_path.stem}.index"
         if not index_file.exists():
             raise FileNotFoundError(f"HNSW index file not found at {index_file}")

-        self.is_compact, self.is_pruned = self._get_index_storage_status(index_file)
-
         # Validate configuration constraints
         if not self.is_compact and kwargs.get("is_skip_neighbors", False):
             raise ValueError("is_skip_neighbors can only be used with is_compact=True")

         if kwargs.get("is_recompute", False) and kwargs.get("external_storage_path"):
             raise ValueError("Cannot use both is_recompute and external_storage_path simultaneously")

         hnsw_config = faiss.HNSWIndexConfig()
         hnsw_config.is_compact = self.is_compact

         # Apply additional configuration options with strict validation
         hnsw_config.is_skip_neighbors = kwargs.get("is_skip_neighbors", False)
         hnsw_config.is_recompute = self.is_pruned or kwargs.get("is_recompute", False)
         hnsw_config.disk_cache_ratio = kwargs.get("disk_cache_ratio", 0.0)
         hnsw_config.external_storage_path = kwargs.get("external_storage_path")

-        self.zmq_port = kwargs.get("zmq_port", 5557)
-
-        if self.is_pruned and not hnsw_config.is_recompute:
-            raise RuntimeError("Index is pruned (embeddings removed) but recompute is disabled. This is impossible - recompute must be enabled for pruned indices.")
-
-        print(f"INFO: Loading index with is_compact={self.is_compact}, is_pruned={self.is_pruned}")
-        print(f"INFO: Config - skip_neighbors={hnsw_config.is_skip_neighbors}, recompute={hnsw_config.is_recompute}")
-
-        self._index = faiss.read_index(str(index_file), faiss.IO_FLAG_MMAP, hnsw_config)
-
-        if self.is_compact:
-            print("✅ Compact CSR format HNSW index loaded successfully.")
-        else:
-            print("✅ Standard HNSW index loaded successfully.")
-
-        self.embedding_server_manager = EmbeddingServerManager(
-            backend_module_name="leann_backend_hnsw.hnsw_embedding_server"
-        )
+        if self.is_pruned and not hnsw_config.is_recompute:
+            raise RuntimeError("Index is pruned but recompute is disabled.")
+
+        self._index = faiss.read_index(str(index_file), faiss.IO_FLAG_MMAP, hnsw_config)
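For orientation, the fields `__init__` now reads from `.meta.json` (a hand-written example; values illustrative and the exact schema is owned by the index writer):

meta = {
    "dimensions": 768,
    "distance_metric": "mips",                 # or "cosine"
    "embedding_model": "facebook/contriever",  # hypothetical model name
    "is_compact": True,
    "is_pruned": True,
    "passage_sources": [],                     # consumed by PassageManager in the embedding server
}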
     def search(self, query: np.ndarray, top_k: int, **kwargs) -> Dict[str, Any]:
         """Search using HNSW index with optional recompute functionality"""
         from . import faiss

-        ef = kwargs.get("complexity", 200)
-
         if self.is_pruned:
             print(f"INFO: Index is pruned - ensuring embedding server is running for recompute.")
             if not self.embedding_model:
                 raise ValueError("Cannot use recompute mode without 'embedding_model' in meta.json.")

-            passages_file = kwargs.get("passages_file")
-            if not passages_file:
-                potential_passages_file = self.index_dir / f"{self.index_prefix}.passages.json"
-                if potential_passages_file.exists():
-                    passages_file = str(potential_passages_file)
-                    print(f"INFO: Automatically found passages file: {passages_file}")
-                else:
-                    raise RuntimeError(f"FATAL: Index is pruned but no passages file found.")
-
-            server_started = self.embedding_server_manager.start_server(
-                port=zmq_port,
-                model_name=self.embedding_model,
-                passages_file=passages_file,
-                distance_metric=self.distance_metric
-            )
-            if not server_started:
-                raise RuntimeError(f"Failed to start HNSW embedding server on port {zmq_port}")
+            meta_file_path = self.index_dir / f"{self.index_path.name}.meta.json"
+            if not meta_file_path.exists():
+                raise RuntimeError(f"FATAL: Index is pruned but metadata file not found: {meta_file_path}")
+            zmq_port = kwargs.get("zmq_port", 5557)
+            self._ensure_server_running(str(meta_file_path), port=zmq_port, **kwargs)

         if query.dtype != np.float32:
             query = query.astype(np.float32)
         if query.ndim == 1:
             query = np.expand_dims(query, axis=0)

         if self.distance_metric == "cosine":
             faiss.normalize_L2(query)

-        try:
-            params = faiss.SearchParametersHNSW()
-            params.efSearch = ef
-            params.zmq_port = kwargs.get("zmq_port", self.zmq_port)
-
-            batch_size = query.shape[0]
-            distances = np.empty((batch_size, top_k), dtype=np.float32)
-            labels = np.empty((batch_size, top_k), dtype=np.int64)
-
-            self._index.search(query.shape[0], faiss.swig_ptr(query), top_k, faiss.swig_ptr(distances), faiss.swig_ptr(labels), params)
-
-            return {"labels": labels, "distances": distances}
-
-        except Exception as e:
-            print(f"💥 ERROR: HNSW search failed. Exception: {e}")
-            raise
-
-    def __del__(self):
-        if hasattr(self, 'embedding_server_manager'):
-            self.embedding_server_manager.stop_server()
+        params = faiss.SearchParametersHNSW()
+        params.zmq_port = kwargs.get("zmq_port", 5557)
+        params.efSearch = kwargs.get("complexity", 32)
+        params.beam_size = kwargs.get("beam_width", 1)
+
+        batch_size = query.shape[0]
+        distances = np.empty((batch_size, top_k), dtype=np.float32)
+        labels = np.empty((batch_size, top_k), dtype=np.int64)
+
+        self._index.search(query.shape[0], faiss.swig_ptr(query), top_k, faiss.swig_ptr(distances), faiss.swig_ptr(labels), params)
+
+        string_labels = [[self.label_map.get(int_label, f"unknown_{int_label}") for int_label in batch_labels] for batch_labels in labels]
+
+        return {"labels": string_labels, "distances": distances}
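A minimal end-to-end sketch of the new search path (assumes an index built as above; parameter values illustrative):

import numpy as np

searcher = HNSWBackend.searcher("indexes/demo.index")
query = np.random.rand(1, 768).astype(np.float32)

result = searcher.search(query, top_k=10, complexity=64, beam_width=1, zmq_port=5557)
print(result["labels"][0])     # string ids resolved through the label map
print(result["distances"][0])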
@@ -56,23 +56,73 @@ class SimplePassageLoader:
     def __len__(self) -> int:
         return len(self.passages_data)

-def load_passages_from_file(passages_file: str) -> SimplePassageLoader:
+def load_passages_from_metadata(meta_file: str) -> SimplePassageLoader:
     """
-    Load passages from a JSON file
-    Expected format: {"passage_id": "passage_text", ...}
+    Load passages using metadata file with PassageManager for lazy loading
     """
-    if not os.path.exists(passages_file):
-        print(f"Warning: Passages file {passages_file} not found. Using empty loader.")
-        return SimplePassageLoader()
-
-    try:
-        with open(passages_file, 'r', encoding='utf-8') as f:
-            passages_data = json.load(f)
-        print(f"Loaded {len(passages_data)} passages from {passages_file}")
-        return SimplePassageLoader(passages_data)
-    except Exception as e:
-        print(f"Error loading passages from {passages_file}: {e}")
-        return SimplePassageLoader()
+    # Load metadata to get passage sources
+    with open(meta_file, 'r') as f:
+        meta = json.load(f)
+
+    # Import PassageManager dynamically to avoid circular imports
+    import sys
+    import importlib.util
+
+    # Find the leann package directory relative to this file
+    current_dir = Path(__file__).parent
+    leann_core_path = current_dir.parent.parent / "leann-core" / "src"
+    sys.path.insert(0, str(leann_core_path))
+
+    try:
+        from leann.api import PassageManager
+        passage_manager = PassageManager(meta['passage_sources'])
+    finally:
+        sys.path.pop(0)
+
+    # Load label map
+    passages_dir = Path(meta_file).parent
+    label_map_file = passages_dir / "leann.labels.map"
+
+    if label_map_file.exists():
+        import pickle
+        with open(label_map_file, 'rb') as f:
+            label_map = pickle.load(f)
+        print(f"Loaded label map with {len(label_map)} entries")
+    else:
+        raise FileNotFoundError(f"Label map file not found: {label_map_file}")
+
+    print(f"Initialized lazy passage loading for {len(label_map)} passages")
+
+    class LazyPassageLoader(SimplePassageLoader):
+        def __init__(self, passage_manager, label_map):
+            self.passage_manager = passage_manager
+            self.label_map = label_map
+            # Initialize parent with empty data
+            super().__init__({})
+
+        def __getitem__(self, passage_id: Union[str, int]) -> Dict[str, str]:
+            """Get passage by ID with lazy loading"""
+            try:
+                int_id = int(passage_id)
+                if int_id in self.label_map:
+                    string_id = self.label_map[int_id]
+                    passage_data = self.passage_manager.get_passage(string_id)
+                    if passage_data and passage_data.get("text"):
+                        return {"text": passage_data["text"]}
+                    else:
+                        print(f"DEBUG: Empty text for ID {int_id} -> {string_id}")
+                        return {"text": ""}
+                else:
+                    print(f"DEBUG: ID {int_id} not found in label_map")
+                    return {"text": ""}
+            except Exception as e:
+                print(f"DEBUG: Exception getting passage {passage_id}: {e}")
+                return {"text": ""}
+
+        def __len__(self) -> int:
+            return len(self.label_map)
+
+    return LazyPassageLoader(passage_manager, label_map)

 def create_hnsw_embedding_server(
     passages_file: Optional[str] = None,
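Usage sketch for the lazy loader above (assumes the index directory holds both the `.meta.json` and the `leann.labels.map` written at build time):

passages = load_passages_from_metadata("indexes/demo.index.meta.json")
print(len(passages))        # number of labels in the map
print(passages[0]["text"])  # fetched on demand through PassageManager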
@@ -158,7 +208,20 @@ def create_hnsw_embedding_server(
         passages = SimplePassageLoader(passages_data)
         print(f"Using provided passages data: {len(passages)} passages")
     elif passages_file:
-        passages = load_passages_from_file(passages_file)
+        # Check if it's a metadata file or a single passages file
+        if passages_file.endswith('.meta.json'):
+            passages = load_passages_from_metadata(passages_file)
+        else:
+            # Try to find metadata file in same directory
+            passages_dir = Path(passages_file).parent
+            meta_files = list(passages_dir.glob("*.meta.json"))
+            if meta_files:
+                print(f"Found metadata file: {meta_files[0]}, using lazy loading")
+                passages = load_passages_from_metadata(str(meta_files[0]))
+            else:
+                # Fallback to original single file loading (will cause warnings)
+                print("WARNING: No metadata file found, using single file loading (may cause missing passage warnings)")
+                passages = SimplePassageLoader()  # Use empty loader to avoid massive warnings
     else:
         passages = SimplePassageLoader()
         print("No passages provided, using empty loader")
@@ -227,6 +290,11 @@ def create_hnsw_embedding_server(
     _is_bge_model = "bge" in model_name.lower()
     batch_size = len(texts_batch)

+    # Validate no empty texts
+    for i, text in enumerate(texts_batch):
+        if not text or text.strip() == "":
+            raise RuntimeError(f"FATAL: Empty text at batch index {i}, ID: {ids_batch[i] if i < len(ids_batch) else 'unknown'}")
+
     # E5 model preprocessing
     if _is_e5_model:
         processed_texts_batch = [f"passage: {text}" for text in texts_batch]
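E5-family models expect matching role prefixes on both sides of retrieval: "passage: " at indexing time (as above) and "query: " at query time. A sketch of the query-side counterpart (variable names illustrative):

processed_texts_batch = [f"passage: {text}" for text in texts_batch]  # document side
processed_query = f"query: {user_query}"  # query side, applied by the caller before encoding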
@@ -373,14 +441,12 @@ def create_hnsw_embedding_server(
     missing_ids = []
     with lookup_timer.timing():
         for nid in node_ids:
-            try:
-                txtinfo = passages[nid]
-                if txtinfo is None or txtinfo["text"] == "":
-                    raise RuntimeError(f"FATAL: Passage with ID {nid} not found - failing fast")
-                else:
-                    txt = txtinfo["text"]
-            except (KeyError, IndexError):
-                raise RuntimeError(f"FATAL: Passage with ID {nid} not found - failing fast")
+            print(f"DEBUG: Looking up passage ID {nid}")
+            txtinfo = passages[nid]
+            if txtinfo is None or txtinfo["text"] == "":
+                raise RuntimeError(f"FATAL: Passage with ID {nid} returned empty text")
+            txt = txtinfo["text"]
+            print(f"DEBUG: Found text for ID {nid}, length: {len(txt)}")
             texts.append(txt)
     lookup_timer.print_elapsed()
@@ -1,4 +1,4 @@
-# File: packages/leann-backend-hnsw/pyproject.toml
+# packages/leann-backend-hnsw/pyproject.toml

 [build-system]
 requires = ["scikit-build-core>=0.10", "numpy", "swig"]
@@ -10,7 +10,6 @@ version = "0.1.0"
 description = "Custom-built HNSW (Faiss) backend for the Leann toolkit."
 dependencies = ["leann-core==0.1.0", "numpy"]

-# Back to the most standard scikit-build-core configuration
 [tool.scikit-build]
 wheel.packages = ["leann_backend_hnsw"]
 editable.mode = "redirect"
Submodule packages/leann-backend-hnsw/third_party/cppzmq deleted from 3bcbd9dad2
Submodule packages/leann-backend-hnsw/third_party/faiss updated: 2547df4377...2365db59a7
Submodule packages/leann-backend-hnsw/third_party/libzmq deleted from 3e5ce5c1cd
Submodule packages/leann-backend-hnsw/third_party/msgpack-c deleted from 9b801f087a