Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/build-wheels-defined.yml
Original file line number Diff line number Diff line change
Expand Up @@ -254,6 +254,8 @@ jobs:
-w /work \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bookworm \
bash -c "
set -e
Expand Down Expand Up @@ -339,6 +341,8 @@ jobs:
-w /work \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bullseye \
bash -c "
set -e
Expand Down
4 changes: 4 additions & 0 deletions .github/workflows/build-wheels-platforms.yml
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,8 @@ jobs:
-e MIN_IDF_MINOR_VERSION=${{ needs.get-supported-versions.outputs.min_idf_minor_version }} \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bookworm \
bash -c "
set -e
Expand All @@ -152,6 +154,8 @@ jobs:
-e MIN_IDF_MINOR_VERSION=${{ needs.get-supported-versions.outputs.min_idf_minor_version }} \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bullseye \
bash -c "
set -e
Expand Down
4 changes: 4 additions & 0 deletions .github/workflows/build-wheels-python-dependent.yml
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,8 @@ jobs:
-e PYO3_USE_ABI3_FORWARD_COMPATIBILITY=1 \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bookworm \
bash -c "
set -e
Expand All @@ -163,6 +165,8 @@ jobs:
-e PYO3_USE_ABI3_FORWARD_COMPATIBILITY=1 \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bullseye \
bash -c "
set -e
Expand Down
2 changes: 2 additions & 0 deletions .github/workflows/test-wheels-install.yml
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,8 @@ jobs:
python test_wheels_install.py
"

# After test_wheels_install.py, ./downloaded_wheels contains only wheels for this
# matrix Python + platform (see prune step in test_wheels_install.main).
- name: Upload tested wheels
uses: actions/upload-artifact@v4
with:
Expand Down
14 changes: 7 additions & 7 deletions .github/workflows/upload-python-wheels.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,11 +41,11 @@ jobs:
path: ./downloaded_wheels
merge-multiple: true

- name: Upload release asset to S3 bucket
run: |
python upload_wheels.py $AWS_BUCKET
python create_index_pages.py $AWS_BUCKET
#- name: Upload release asset to S3 bucket
# run: |
# python upload_wheels.py $AWS_BUCKET
# python create_index_pages.py $AWS_BUCKET

- name: Drop AWS cache
id: invalidate-index-cache
run: aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CACHE_INVALIDATION }} --paths "/pypi/*"
#- name: Drop AWS cache
# id: invalidate-index-cache
# run: aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CACHE_INVALIDATION }} --paths "/pypi/*"
26 changes: 23 additions & 3 deletions .github/workflows/wheels-repair.yml
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,9 @@ jobs:
run: |
docker run --rm \
--platform ${{ matrix.docker_platform }} \
-e AUDITWHEEL_PLAT=manylinux_2_36_armv7l \
-e AUDITWHEEL_ONLY_PLAT=1 \
-e AUDITWHEEL_ALLOW_LINUX_TAG=1 \
-v $(pwd):/work \
-w /work \
${{ matrix.docker_image }} \
Expand All @@ -177,6 +180,9 @@ jobs:
run: |
docker run --rm \
--platform ${{ matrix.docker_platform }} \
-e AUDITWHEEL_PLAT=manylinux_2_31_armv7l \
-e AUDITWHEEL_ONLY_PLAT=1 \
-e AUDITWHEEL_ALLOW_LINUX_TAG=1 \
-v $(pwd):/work \
-w /work \
${{ matrix.docker_image }} \
Expand All @@ -201,12 +207,26 @@ jobs:
needs: repair-wheels
runs-on: ubuntu-latest
steps:
- name: Download all repaired wheels
- name: Checkout repository
uses: actions/checkout@v4

# Download each wheels-repaired-* artifact into its own subdirectory so
# same-named wheels from ARMv7 vs ARMv7 Legacy are not silently overwritten
# before collision detection or S3 upload (see README: ARMv7 wheel collisions).
- name: Download all repaired wheels (per-artifact subdirectories)
uses: actions/download-artifact@v4
with:
pattern: wheels-repaired-*
path: ./all_wheels
merge-multiple: true
path: ./all_wheels_staging
merge-multiple: false

- name: Check for duplicate wheel basenames across lineages
run: python3 check_wheel_collisions.py ./all_wheels_staging

- name: Flatten merged wheels directory
run: |
mkdir -p ./all_wheels
find ./all_wheels_staging -type f -name '*.whl' -exec cp -f {} ./all_wheels/ \;

- name: List merged wheels
run: |
Expand Down
25 changes: 25 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,18 @@ The repair tools are used after build to link and bundle all the needed librarie

This logic is done by the [repair workflow](./.github/workflows/wheels-repair.yml) and the [`repair_wheels.py` script](./repair_wheels.py)

### ARMv7 vs ARMv7 Legacy: same wheel filename, different binaries

`Linux ARMv7` and `Linux ARMv7 Legacy` can both produce a wheel whose **filename is identical** (same PEP 425 tags) while the **ELF contents differ** (different glibc/OpenSSL/Rust toolchain lineage). **Note:** `wheels-download-directory-*` CI artifacts are the **pre-repair** build outputs; comparing those can still show identical names until the [repair workflow](./.github/workflows/wheels-repair.yml) runs. Two bad outcomes follow if that is not handled after repair/merge:

1. **Artifact merge / local flatten** — downloading multiple `wheels-repaired-*` artifacts into one directory with `merge-multiple: true` can make the second file **silently overwrite** the first on disk before any upload runs.
2. **S3 upload** — [`upload_wheels.py`](./upload_wheels.py) publishes to `pypi/<package>/<wheel-filename>`. Uploading a second wheel with the **same key** replaces the object; clients then see whichever build ran last, which can surface as import crashes or segfaults.

Mitigations in this repo:

- Repair sets **`AUDITWHEEL_PLAT`** and **`AUDITWHEEL_ONLY_PLAT`** per lineage (`manylinux_2_36_armv7l` vs `manylinux_2_31_armv7l`) so [`repair_wheels.py`](./repair_wheels.py) runs `auditwheel repair --plat ... --only-plat` and emitted wheels get **distinct single-tag filenames** when auditwheel supports it. If **`AUDITWHEEL_PLAT` is set**, ARMv7 “libc detection failed” outcomes are **not** treated as non-fatal skips (that would leave identical filenames across lineages).
- The repair workflow merges repaired artifacts using **per-artifact subdirectories**, then runs [`check_wheel_collisions.py`](./check_wheel_collisions.py) to **fail CI** if the same `*.whl` basename appears with **different contents** across lineages, before flattening for tests/upload.

## Activity Diagram
The main file is `build-wheels-platforms.yml` which is scheduled to run periodically to build Python wheels for any requirement of all [ESP-IDF]-supported versions.

Expand All @@ -167,4 +179,17 @@ Docker files are in its own repository where there are build and published from.
- For older ARMv7 operating systems
- For packages requiring glibc 2.31

### ARMv7: prefer piwheels for resolution

> [!NOTE]
> For ARMv7 (and ARMv7 Legacy) environments, you may want to prefer [piwheels](https://www.piwheels.org/) as the primary index and use Espressif's index as a secondary source:

```bash
python -m pip install --index-url https://www.piwheels.org/simple --extra-index-url https://dl.espressif.com/pypi/ <package>
```

This repository's ARMv7 CI workflows also set these as `PIP_INDEX_URL` / `PIP_EXTRA_INDEX_URL` inside the ARMv7 Docker builds.

**Warning:** piwheels wheels may rely on system-provided shared libraries (i.e. may not bundle `.libs/`). If a target OS is missing those libraries or has an incompatible version, imports may fail at runtime.

[ESP-IDF]: https://github.com/espressif/esp-idf
78 changes: 78 additions & 0 deletions check_wheel_collisions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
#
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
#
# SPDX-License-Identifier: Apache-2.0
#
"""Detect duplicate *.whl basenames with different file contents under a tree.

Used after downloading per-arch ``wheels-repaired-*`` artifacts into separate
subdirectories (``merge-multiple: false``) so a filesystem flatten step cannot
hide ARMv7 vs ARMv7 Legacy collisions before upload to S3.
"""

from __future__ import annotations

import hashlib
import sys

from collections import defaultdict
from pathlib import Path


def _sha256_file(path: Path, chunk_size: int = 1024 * 1024) -> str:
h = hashlib.sha256()
with path.open("rb") as f:
while True:
b = f.read(chunk_size)
if not b:
break
h.update(b)
return h.hexdigest()


def collect_collision_errors(root: Path) -> list[str]:
"""Return human-readable error lines; empty if OK."""
wheels: list[Path] = []
for p in sorted(root.rglob("*.whl")):
if p.is_file():
wheels.append(p)

by_name: defaultdict[str, list[Path]] = defaultdict(list)
for p in wheels:
by_name[p.name].append(p)

errors: list[str] = []
for name, paths in sorted(by_name.items()):
if len(paths) < 2:
continue
by_digest: defaultdict[str, list[Path]] = defaultdict(list)
for p in paths:
by_digest[_sha256_file(p)].append(p)
if len(by_digest) == 1:
# Identical content in multiple artifact trees — unusual but safe.
continue
lines = [f"Duplicate wheel basename with different contents: {name}"]
for p in paths:
lines.append(f" - {p} sha256={_sha256_file(p)}")
errors.append("\n".join(lines))
return errors


def main(argv: list[str]) -> int:
    """CLI entry point: check a directory tree for wheel collisions.

    ``argv[1]`` (optional, default ``"."``) names the root directory to
    scan.  Exit status: ``0`` when no conflicting duplicates exist, ``1``
    when collisions are found, ``2`` when the argument is not a directory.
    """
    target = argv[1] if len(argv) > 1 else "."
    root = Path(target).resolve()
    if not root.is_dir():
        print(f"Error: not a directory: {root}", file=sys.stderr)
        return 2

    problems = collect_collision_errors(root)
    if not problems:
        return 0

    # Emit one blank-line-separated block per colliding basename on stderr.
    print("Wheel basename collision check failed:\n", file=sys.stderr)
    for block in problems:
        print(block, file=sys.stderr)
    print(file=sys.stderr)
    return 1


# Script entry point: propagate main()'s status code as the process exit
# status (0 = clean, 1 = collisions found, 2 = bad argument).
if __name__ == "__main__":
    raise SystemExit(main(sys.argv))
Loading
Loading