yichuan520030910320
2025-07-25 00:12:47 -07:00
8 changed files with 308 additions and 519 deletions


@@ -1,262 +1,11 @@
name: CI - Build Multi-Platform Packages
name: CI
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
workflow_dispatch:
inputs:
publish:
description: 'Publish to PyPI (only use for emergency fixes)'
required: true
default: 'false'
type: choice
options:
- 'false'
- 'test'
- 'prod'
jobs:
# Build pure Python package: leann-core
build-core:
name: Build leann-core
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Install uv
uses: astral-sh/setup-uv@v4
- name: Install build dependencies
run: |
uv pip install --system build twine
- name: Build package
run: |
cd packages/leann-core
uv build
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: leann-core-dist
path: packages/leann-core/dist/
# Build binary package: leann-backend-hnsw (default backend)
build-hnsw:
name: Build leann-backend-hnsw
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
python-version: ['3.9', '3.10', '3.11', '3.12']
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v4
- name: Install system dependencies (Ubuntu)
if: runner.os == 'Linux'
run: |
sudo apt-get update
sudo apt-get install -y libomp-dev libboost-all-dev libzmq3-dev \
pkg-config libopenblas-dev patchelf
- name: Install system dependencies (macOS)
if: runner.os == 'macOS'
run: |
brew install libomp boost zeromq
- name: Install build dependencies
run: |
uv pip install --system scikit-build-core numpy swig
uv pip install --system auditwheel delocate
- name: Build wheel
run: |
cd packages/leann-backend-hnsw
uv build --wheel --python python
- name: Repair wheel (Linux)
if: runner.os == 'Linux'
run: |
cd packages/leann-backend-hnsw
auditwheel repair dist/*.whl -w dist_repaired
rm -rf dist
mv dist_repaired dist
- name: Repair wheel (macOS)
if: runner.os == 'macOS'
run: |
cd packages/leann-backend-hnsw
delocate-wheel -w dist_repaired -v dist/*.whl
rm -rf dist
mv dist_repaired dist
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: hnsw-${{ matrix.os }}-py${{ matrix.python-version }}
path: packages/leann-backend-hnsw/dist/
# Build binary package: leann-backend-diskann (multi-platform)
build-diskann:
name: Build leann-backend-diskann
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
python-version: ['3.9', '3.10', '3.11', '3.12']
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v4
- name: Install system dependencies (Ubuntu)
if: runner.os == 'Linux'
run: |
sudo apt-get update
sudo apt-get install -y libomp-dev libboost-all-dev libaio-dev libzmq3-dev \
protobuf-compiler libprotobuf-dev libabsl-dev patchelf
# Install Intel MKL using Intel's installer
wget https://registrationcenter-download.intel.com/akdlm/IRC_NAS/79153e0f-74d7-45af-b8c2-258941adf58a/intel-onemkl-2025.0.0.940.sh
sudo sh intel-onemkl-2025.0.0.940.sh -a --components intel.oneapi.lin.mkl.devel --action install --eula accept -s
source /opt/intel/oneapi/setvars.sh
echo "MKLROOT=/opt/intel/oneapi/mkl/latest" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=/opt/intel/oneapi/mkl/latest/lib/intel64:$LD_LIBRARY_PATH" >> $GITHUB_ENV
- name: Install system dependencies (macOS)
if: runner.os == 'macOS'
run: |
brew install libomp boost zeromq protobuf
# MKL is not available on Homebrew, but DiskANN can work without it
- name: Install build dependencies
run: |
uv pip install --system scikit-build-core numpy Cython pybind11
if [[ "$RUNNER_OS" == "Linux" ]]; then
uv pip install --system auditwheel
else
uv pip install --system delocate
fi
- name: Build wheel
run: |
cd packages/leann-backend-diskann
uv build --wheel --python python
- name: Repair wheel (Linux)
if: runner.os == 'Linux'
run: |
cd packages/leann-backend-diskann
auditwheel repair dist/*.whl -w dist_repaired
rm -rf dist
mv dist_repaired dist
- name: Repair wheel (macOS)
if: runner.os == 'macOS'
run: |
cd packages/leann-backend-diskann
delocate-wheel -w dist_repaired -v dist/*.whl
rm -rf dist
mv dist_repaired dist
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: diskann-${{ matrix.os }}-py${{ matrix.python-version }}
path: packages/leann-backend-diskann/dist/
# Build meta-package: leann (build last)
build-meta:
name: Build leann meta-package
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Install uv
uses: astral-sh/setup-uv@v4
- name: Install build dependencies
run: |
uv pip install --system build
- name: Build package
run: |
cd packages/leann
uv build
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: leann-meta-dist
path: packages/leann/dist/
# Publish to PyPI (only for emergency fixes or manual triggers)
publish:
name: Publish to PyPI (Emergency)
needs: [build-core, build-hnsw, build-diskann, build-meta]
runs-on: ubuntu-latest
if: github.event_name == 'workflow_dispatch' && github.event.inputs.publish != 'false'
steps:
- name: Download all artifacts
uses: actions/download-artifact@v4
with:
path: dist
- name: Flatten directory structure
run: |
mkdir -p all_wheels
find dist -name "*.whl" -exec cp {} all_wheels/ \;
find dist -name "*.tar.gz" -exec cp {} all_wheels/ \;
- name: Show what will be published
run: |
echo "📦 Packages to be published:"
ls -la all_wheels/
- name: Publish to Test PyPI
if: github.event.inputs.publish == 'test'
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.TEST_PYPI_API_TOKEN }}
repository-url: https://test.pypi.org/legacy/
packages-dir: all_wheels/
skip-existing: true
- name: Publish to PyPI
if: github.event.inputs.publish == 'prod'
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_API_TOKEN }}
packages-dir: all_wheels/
skip-existing: true
build:
uses: ./.github/workflows/build-reusable.yml

.github/workflows/build-reusable.yml (new file)

@@ -0,0 +1,230 @@
name: Reusable Build
on:
workflow_call:
inputs:
ref:
description: 'Git ref to build'
required: false
type: string
default: ''
jobs:
build:
name: Build ${{ matrix.os }} Python ${{ matrix.python }}
strategy:
matrix:
include:
- os: ubuntu-latest
python: '3.9'
container: 'quay.io/pypa/manylinux2014_x86_64'
- os: ubuntu-latest
python: '3.10'
container: 'quay.io/pypa/manylinux2014_x86_64'
- os: ubuntu-latest
python: '3.11'
container: 'quay.io/pypa/manylinux2014_x86_64'
- os: ubuntu-latest
python: '3.12'
container: 'quay.io/pypa/manylinux2014_x86_64'
- os: macos-latest
python: '3.9'
container: ''
- os: macos-latest
python: '3.10'
container: ''
- os: macos-latest
python: '3.11'
container: ''
- os: macos-latest
python: '3.12'
container: ''
runs-on: ${{ matrix.os }}
container: ${{ matrix.container }}
steps:
# Use v3 for manylinux2014 compatibility (Node.js 16 instead of 20)
- uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
submodules: recursive
- name: Setup Python (macOS and regular Ubuntu)
if: matrix.container == ''
uses: actions/setup-python@v4 # v4 for better compatibility
with:
python-version: ${{ matrix.python }}
- name: Setup Python (manylinux container)
if: matrix.container != ''
run: |
# Use the pre-installed Python version in manylinux container
# Convert Python version format (3.9 -> 39, 3.10 -> 310, etc.)
PY_VER=$(echo "${{ matrix.python }}" | sed 's/\.//g')
/opt/python/cp${PY_VER}-*/bin/python -m pip install --upgrade pip
# Create symlinks for convenience
ln -sf /opt/python/cp${PY_VER}-*/bin/python /usr/local/bin/python
ln -sf /opt/python/cp${PY_VER}-*/bin/pip /usr/local/bin/pip
- name: Install uv (macOS and regular Ubuntu)
if: matrix.container == ''
uses: astral-sh/setup-uv@v4
- name: Install uv (manylinux container)
if: matrix.container != ''
run: |
curl -LsSf https://astral.sh/uv/install.sh | sh
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Install system dependencies (Ubuntu - regular)
if: runner.os == 'Linux' && matrix.container == ''
run: |
sudo apt-get update
sudo apt-get install -y libomp-dev libboost-all-dev protobuf-compiler libzmq3-dev \
pkg-config libopenblas-dev patchelf libabsl-dev libaio-dev libprotobuf-dev
# Install Intel MKL for DiskANN
wget -q https://registrationcenter-download.intel.com/akdlm/IRC_NAS/79153e0f-74d7-45af-b8c2-258941adf58a/intel-onemkl-2025.0.0.940.sh
sudo sh intel-onemkl-2025.0.0.940.sh -a --components intel.oneapi.lin.mkl.devel --action install --eula accept -s
source /opt/intel/oneapi/setvars.sh
echo "MKLROOT=/opt/intel/oneapi/mkl/latest" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=/opt/intel/oneapi/mkl/latest/lib/intel64:$LD_LIBRARY_PATH" >> $GITHUB_ENV
- name: Install system dependencies (manylinux container)
if: runner.os == 'Linux' && matrix.container != ''
run: |
# manylinux2014 uses yum instead of apt
yum install -y epel-release
yum install -y boost-devel protobuf-compiler zeromq-devel \
pkg-config openblas-devel libaio-devel protobuf-devel
# Build tools are pre-installed in manylinux
# MKL is more complex in container, skip for now and use OpenBLAS
- name: Install system dependencies (macOS)
if: runner.os == 'macOS'
run: |
brew install llvm libomp boost protobuf zeromq
- name: Install build dependencies
run: |
if [[ -n "${{ matrix.container }}" ]]; then
# In manylinux container, use regular pip
pip install scikit-build-core numpy swig Cython pybind11 auditwheel
else
# Regular environment, use uv
uv pip install --system scikit-build-core numpy swig Cython pybind11
if [[ "$RUNNER_OS" == "Linux" ]]; then
uv pip install --system auditwheel
else
uv pip install --system delocate
fi
fi
- name: Build packages
run: |
# Choose build command based on environment
if [[ -n "${{ matrix.container }}" ]]; then
BUILD_CMD="pip wheel . --no-deps -w dist"
else
BUILD_CMD="uv build --wheel --python python"
fi
# Build core (platform independent)
if [ "${{ matrix.os }}" == "ubuntu-latest" ]; then
cd packages/leann-core
if [[ -n "${{ matrix.container }}" ]]; then
pip wheel . --no-deps -w dist
else
uv build
fi
cd ../..
fi
# Build HNSW backend
cd packages/leann-backend-hnsw
if [ "${{ matrix.os }}" == "macos-latest" ]; then
CC=$(brew --prefix llvm)/bin/clang CXX=$(brew --prefix llvm)/bin/clang++ $BUILD_CMD
else
eval $BUILD_CMD
fi
cd ../..
# Build DiskANN backend
cd packages/leann-backend-diskann
if [ "${{ matrix.os }}" == "macos-latest" ]; then
CC=$(brew --prefix llvm)/bin/clang CXX=$(brew --prefix llvm)/bin/clang++ $BUILD_CMD
else
eval $BUILD_CMD
fi
cd ../..
# Build meta package (platform independent)
if [ "${{ matrix.os }}" == "ubuntu-latest" ]; then
cd packages/leann
if [[ -n "${{ matrix.container }}" ]]; then
pip wheel . --no-deps -w dist
else
uv build
fi
cd ../..
fi
- name: Repair wheels (Linux)
if: runner.os == 'Linux'
run: |
# Repair HNSW wheel
cd packages/leann-backend-hnsw
if [ -d dist ]; then
# Show what platform auditwheel will use
auditwheel show dist/*.whl || true
# Let auditwheel auto-detect the appropriate manylinux tag
auditwheel repair dist/*.whl -w dist_repaired
rm -rf dist
mv dist_repaired dist
fi
cd ../..
# Repair DiskANN wheel
cd packages/leann-backend-diskann
if [ -d dist ]; then
# Show what platform auditwheel will use
auditwheel show dist/*.whl || true
# Let auditwheel auto-detect the appropriate manylinux tag
auditwheel repair dist/*.whl -w dist_repaired
rm -rf dist
mv dist_repaired dist
fi
cd ../..
- name: Repair wheels (macOS)
if: runner.os == 'macOS'
run: |
# Repair HNSW wheel
cd packages/leann-backend-hnsw
if [ -d dist ]; then
delocate-wheel -w dist_repaired -v dist/*.whl
rm -rf dist
mv dist_repaired dist
fi
cd ../..
# Repair DiskANN wheel
cd packages/leann-backend-diskann
if [ -d dist ]; then
delocate-wheel -w dist_repaired -v dist/*.whl
rm -rf dist
mv dist_repaired dist
fi
cd ../..
- name: List built packages
run: |
echo "📦 Built packages:"
find packages/*/dist -name "*.whl" -o -name "*.tar.gz" | sort
- name: Upload artifacts
uses: actions/upload-artifact@v3 # v3 for manylinux2014 compatibility
with:
name: packages-${{ matrix.os }}-py${{ matrix.python }}
path: packages/*/dist/


@@ -1,167 +1,79 @@
name: Manual Release
name: Release
on:
workflow_dispatch:
inputs:
version:
description: 'Version to release (e.g., 0.1.1)'
description: 'Version to release (e.g., 0.1.2)'
required: true
type: string
test_pypi:
description: 'Test on TestPyPI first'
required: false
type: boolean
default: true
jobs:
validate-and-release:
update-version:
name: Update Version
runs-on: ubuntu-latest
permissions:
contents: write
actions: read
outputs:
commit-sha: ${{ steps.push.outputs.commit-sha }}
steps:
- uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/checkout@v3 # Compatibility with manylinux2014
- name: Check CI status
run: |
echo " This workflow will download build artifacts from the latest CI run."
echo " CI must have completed successfully on the current commit."
echo ""
- name: Validate version format
- name: Validate version
run: |
if ! [[ "${{ inputs.version }}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
echo "❌ Invalid version format. Use semantic versioning (e.g., 0.1.1)"
echo "❌ Invalid version format"
exit 1
fi
echo "✅ Version format valid: ${{ inputs.version }}"
echo "✅ Version format valid"
- name: Check if version already exists
run: |
if git tag | grep -q "^v${{ inputs.version }}$"; then
echo "❌ Version v${{ inputs.version }} already exists!"
exit 1
fi
echo "✅ Version is new"
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.13'
- name: Install uv
run: |
curl -LsSf https://astral.sh/uv/install.sh | sh
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Update versions
- name: Update versions and push
id: push
run: |
./scripts/bump_version.sh ${{ inputs.version }}
git config user.name "GitHub Actions"
git config user.email "actions@github.com"
git add packages/*/pyproject.toml
git commit -m "chore: release v${{ inputs.version }}"
- name: Push version update
run: |
git push origin HEAD:main
echo "✅ Pushed version update to main branch"
git push origin main
COMMIT_SHA=$(git rev-parse HEAD)
echo "commit-sha=$COMMIT_SHA" >> $GITHUB_OUTPUT
id: push-version
- name: Wait for CI to complete
id: wait-for-ci
run: |
echo "⏳ Waiting for CI to build new version..."
COMMIT_SHA="${{ steps.push-version.outputs.commit-sha }}"
# Wait up to 20 minutes for CI to complete
for i in {1..40}; do
RUN_ID=$(gh run list \
--workflow="CI - Build Multi-Platform Packages" \
--commit=$COMMIT_SHA \
--json databaseId,status \
--jq '.[] | select(.status == "completed") | .databaseId' | head -1)
if [ ! -z "$RUN_ID" ]; then
echo "✅ Found completed CI run: $RUN_ID"
echo "run-id=$RUN_ID" >> $GITHUB_OUTPUT
exit 0
fi
echo "⏳ Waiting for CI... (attempt $i/40)"
sleep 30
done
echo "❌ CI did not complete within 20 minutes"
exit 1
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
echo "✅ Pushed version update: $COMMIT_SHA"
build-packages:
name: Build packages
needs: update-version
uses: ./.github/workflows/build-reusable.yml
with:
ref: ${{ needs.update-version.outputs.commit-sha }}
publish:
name: Publish and Release
needs: build-packages
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v3 # Consistency with build workflow
with:
ref: ${{ needs.update-version.outputs.commit-sha }}
- name: Download artifacts from CI run
run: |
echo "📦 Downloading artifacts from CI run ${{ steps.wait-for-ci.outputs.run-id }}..."
# Download all artifacts (not just wheels-*)
gh run download ${{ steps.wait-for-ci.outputs.run-id }} \
--dir ./dist-downloads
# Consolidate all wheels into packages/*/dist/
mkdir -p packages/leann-core/dist
mkdir -p packages/leann-backend-hnsw/dist
mkdir -p packages/leann-backend-diskann/dist
mkdir -p packages/leann/dist
find ./dist-downloads -name "*.whl" -exec cp {} ./packages/ \;
# Move wheels to correct package directories
for wheel in packages/*.whl; do
if [[ $wheel == *"leann_core"* ]]; then
mv "$wheel" packages/leann-core/dist/
elif [[ $wheel == *"leann_backend_hnsw"* ]]; then
mv "$wheel" packages/leann-backend-hnsw/dist/
elif [[ $wheel == *"leann_backend_diskann"* ]]; then
mv "$wheel" packages/leann-backend-diskann/dist/
elif [[ $wheel == *"leann-"* ]] && [[ $wheel != *"backend"* ]] && [[ $wheel != *"core"* ]]; then
mv "$wheel" packages/leann/dist/
fi
done
# List downloaded wheels
echo "✅ Downloaded wheels:"
find packages/*/dist -name "*.whl" -type f | sort
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Download all artifacts
uses: actions/download-artifact@v3 # Match upload-artifact version
with:
path: dist-artifacts
- name: Test on TestPyPI (optional)
if: inputs.test_pypi
continue-on-error: true
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.TEST_PYPI_API_TOKEN }}
- name: Collect packages
run: |
if [ -z "$TWINE_PASSWORD" ]; then
echo "⚠️ TEST_PYPI_API_TOKEN not configured, skipping TestPyPI upload"
echo " To enable TestPyPI testing, add TEST_PYPI_API_TOKEN to repository secrets"
exit 0
fi
mkdir -p dist
find dist-artifacts -name "*.whl" -exec cp {} dist/ \;
find dist-artifacts -name "*.tar.gz" -exec cp {} dist/ \;
pip install twine
echo "📦 Uploading to TestPyPI..."
twine upload --repository testpypi packages/*/dist/* --verbose || {
echo "⚠️ TestPyPI upload failed, but continuing with release"
echo " This is optional and won't block the release"
exit 0
}
echo "✅ Test upload successful!"
echo "📋 Check packages at: https://test.pypi.org/user/your-username/"
echo ""
echo "To test installation:"
echo "pip install -i https://test.pypi.org/simple/ leann"
echo "📦 Packages to publish:"
ls -la dist/
- name: Publish to PyPI
env:
@@ -170,46 +82,22 @@ jobs:
run: |
if [ -z "$TWINE_PASSWORD" ]; then
echo "❌ PYPI_API_TOKEN not configured!"
echo " Please add PYPI_API_TOKEN to repository secrets"
exit 1
fi
pip install twine
echo "📦 Publishing to PyPI..."
# Collect all wheels in one place
mkdir -p all_wheels
find packages/*/dist -name "*.whl" -exec cp {} all_wheels/ \;
find packages/*/dist -name "*.tar.gz" -exec cp {} all_wheels/ \;
echo "📋 Packages to publish:"
ls -la all_wheels/
# Upload to PyPI
twine upload all_wheels/* --skip-existing --verbose
twine upload dist/* --skip-existing --verbose
echo "✅ Published to PyPI!"
echo "🎉 Check packages at: https://pypi.org/project/leann/"
- name: Create and push tag
- name: Create release
run: |
git tag "v${{ inputs.version }}"
git push origin main
git push origin "v${{ inputs.version }}"
echo "✅ Tag v${{ inputs.version }} created and pushed"
- name: Create GitHub Release
uses: softprops/action-gh-release@v1
with:
tag_name: v${{ inputs.version }}
name: Release v${{ inputs.version }}
body: |
## 🚀 Release v${{ inputs.version }}
### What's Changed
See the [full changelog](https://github.com/${{ github.repository }}/compare/...v${{ inputs.version }})
### Installation
```bash
pip install leann==${{ inputs.version }}
```
gh release create "v${{ inputs.version }}" \
--title "Release v${{ inputs.version }}" \
--notes "🚀 Released to PyPI: https://pypi.org/project/leann/${{ inputs.version }}/" \
--latest
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
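
The Release workflow delegates the actual version rewrite to `./scripts/bump_version.sh`, which this diff does not include. Below is a minimal sketch of what such a script could look like, assuming it only rewrites the `version` field and the `leann-core==X.Y.Z` pin in each `packages/*/pyproject.toml`; the sed patterns and structure are illustrative, not the repository's actual implementation.

```bash
#!/usr/bin/env bash
# Hypothetical sketch of scripts/bump_version.sh -- the real script is not part of this diff.
set -euo pipefail

NEW_VERSION="${1:?usage: bump_version.sh <version>}"

for toml in packages/*/pyproject.toml; do
  # Rewrite the package's own version field.
  sed -i.bak -E "s/^version = \"[0-9]+\.[0-9]+\.[0-9]+\"/version = \"${NEW_VERSION}\"/" "$toml"
  # Keep the leann-core pin in the backend and meta packages in sync.
  sed -i.bak -E "s/leann-core==[0-9]+\.[0-9]+\.[0-9]+/leann-core==${NEW_VERSION}/" "$toml"
  rm -f "${toml}.bak"
done

echo "Bumped all packages to ${NEW_VERSION}"
```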


@@ -1,100 +1,22 @@
# Release Guide
## 📋 Prerequisites
## Setup (One-time)
Before releasing, ensure:
1. ✅ All code changes are committed and pushed
2. ✅ CI has passed on the latest commit (check [Actions](https://github.com/yichuan-w/LEANN/actions/workflows/ci.yml))
3. ✅ You have determined the new version number
Add `PYPI_API_TOKEN` to GitHub Secrets:
1. Get token: https://pypi.org/manage/account/token/
2. Add to secrets: Settings → Secrets → Actions → `PYPI_API_TOKEN`
### Required: PyPI Configuration
## Release (One-click)
To enable PyPI publishing:
1. Get a PyPI API token from https://pypi.org/manage/account/token/
2. Add it to repository secrets: Settings → Secrets → Actions → New repository secret
- Name: `PYPI_API_TOKEN`
- Value: Your PyPI token (starts with `pypi-`)
1. Go to: https://github.com/yichuan-w/LEANN/actions/workflows/release-manual.yml
2. Click "Run workflow"
3. Enter version: `0.1.2`
4. Click the green "Run workflow" button (a CLI equivalent is sketched below)
### Optional: TestPyPI Configuration
That's it! The workflow will automatically:
- ✅ Update version in all packages
- ✅ Build all packages
- ✅ Publish to PyPI
- ✅ Create GitHub tag and release
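
If you prefer the terminal, the GitHub CLI can trigger the same workflow. This is a sketch, assuming `gh` is installed and authenticated and that `version` is the only required input of the simplified workflow:

```bash
# Kick off the release workflow on the default branch
gh workflow run release-manual.yml -f version=0.1.2

# Confirm the run started and check its status
gh run list --workflow=release-manual.yml --limit 1
```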
To enable TestPyPI testing (recommended but not required):
1. Get a TestPyPI API token from https://test.pypi.org/manage/account/token/
2. Add it to repository secrets: Settings → Secrets → Actions → New repository secret
- Name: `TEST_PYPI_API_TOKEN`
- Value: Your TestPyPI token (starts with `pypi-`)
**Note**: TestPyPI testing is optional. If not configured, the release will skip TestPyPI and proceed.
## 🚀 Recommended: Manual Release Workflow
### Via GitHub UI (Most Reliable)
1. **Verify CI Status**: Check that the latest commit has a green checkmark ✅
2. Go to [Actions → Manual Release](https://github.com/yichuan-w/LEANN/actions/workflows/release-manual.yml)
3. Click "Run workflow"
4. Enter version (e.g., `0.1.1`)
5. Toggle "Test on TestPyPI first" if desired
6. Click "Run workflow"
**What happens:**
- ✅ Downloads pre-built packages from CI (no rebuild needed!)
- ✅ Updates all package versions
- ✅ Optionally tests on TestPyPI
- ✅ **Publishes directly to PyPI**
- ✅ Creates tag and GitHub release
### Via Command Line
```bash
gh workflow run release-manual.yml -f version=0.1.1 -f test_pypi=true
```
## ⚡ Quick Release (One-Line)
For experienced users who want the fastest path:
```bash
./scripts/release.sh 0.1.1
```
This script will:
1. Update all package versions
2. Commit and push changes
3. Create GitHub release
4. **The Manual Release workflow will then publish to PyPI automatically**
⚠️ **Note**: If CI fails, you'll need to manually fix and re-tag
## Manual Testing Before Release
For testing specific packages locally (especially DiskANN on macOS):
```bash
# Build specific package locally
./scripts/build_and_test.sh diskann # or hnsw, core, meta, all
# Test installation in a clean environment
python -m venv test_env
source test_env/bin/activate
pip install packages/*/dist/*.whl
# Upload to Test PyPI (optional)
./scripts/upload_to_pypi.sh test
# Upload to Production PyPI (use with caution)
./scripts/upload_to_pypi.sh prod
```
## First-time setup
1. Install GitHub CLI:
```bash
brew install gh
gh auth login
```
2. Set PyPI token in GitHub:
```bash
gh secret set PYPI_API_TOKEN
# Paste your PyPI token when prompted
```
Check progress: https://github.com/yichuan-w/LEANN/actions
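
Once the run finishes, a quick install check from PyPI (in a throwaway virtual environment; substitute the version you just released for the illustrative `0.1.7` pin):

```bash
python -m venv /tmp/leann-release-check
source /tmp/leann-release-check/bin/activate
pip install "leann==0.1.7"
pip show leann
```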


@@ -4,8 +4,8 @@ build-backend = "scikit_build_core.build"
[project]
name = "leann-backend-diskann"
version = "0.1.2"
dependencies = ["leann-core==0.1.2", "numpy"]
version = "0.1.7"
dependencies = ["leann-core==0.1.7", "numpy"]
[tool.scikit-build]
# Key: simplified CMake path


@@ -6,10 +6,10 @@ build-backend = "scikit_build_core.build"
[project]
name = "leann-backend-hnsw"
version = "0.1.2"
version = "0.1.7"
description = "Custom-built HNSW (Faiss) backend for the Leann toolkit."
dependencies = [
"leann-core==0.1.2",
"leann-core==0.1.7",
"numpy",
"pyzmq>=23.0.0",
"msgpack>=1.0.0",


@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "leann-core"
version = "0.1.2"
version = "0.1.7"
description = "Core API and plugin system for LEANN"
readme = "README.md"
requires-python = ">=3.9"


@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "leann"
version = "0.1.2"
version = "0.1.7"
description = "LEANN - The smallest vector index in the world. RAG Everything with LEANN!"
readme = "README.md"
requires-python = ">=3.9"