diff --git a/.github/workflows/build-wheels-python-dependent.yml b/.github/workflows/build-wheels-python-dependent.yml index 7764707..db6744a 100644 --- a/.github/workflows/build-wheels-python-dependent.yml +++ b/.github/workflows/build-wheels-python-dependent.yml @@ -18,6 +18,8 @@ jobs: runs-on: ${{ matrix.runner }} env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # PyO3 (cryptography, etc.): allow building against CPython newer than PyO3's declared max when using stable ABI + PYO3_USE_ABI3_FORWARD_COMPATIBILITY: "1" strategy: fail-fast: false matrix: @@ -148,7 +150,7 @@ jobs: bash os_dependencies/linux_arm.sh # Source Rust environment after installation . \$HOME/.cargo/env - python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} " - name: Build Python dependent wheels - ARMv7 Legacy (in Docker) @@ -171,7 +173,7 @@ jobs: bash os_dependencies/linux_arm.sh # Source Rust environment after installation . 
\$HOME/.cargo/env - python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} " - name: Build Python dependent wheels - Linux/macOS @@ -184,11 +186,11 @@ jobs: export ARCHFLAGS="-arch x86_64" fi - python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} - name: Build Python dependent wheels for ${{ matrix.python-version }} - Windows if: matrix.os == 'Windows' - run: python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + run: python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} - name: Upload artifacts diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index aee50aa..e79b428 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -32,7 +32,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install packaging pyyaml colorama requests + python -m pip install -r build_requirements.txt - name: Run unit tests run: python -m unittest discover -s . -v diff --git a/_helper_functions.py b/_helper_functions.py index c858feb..ea752e1 100644 --- a/_helper_functions.py +++ b/_helper_functions.py @@ -107,11 +107,23 @@ def get_no_binary_args(requirement_name: str) -> list: return [] +def _safe_text_for_stdout(text: str) -> str: + """Avoid UnicodeEncodeError when printing pip/tool output on Windows (e.g. 
cp1252 console).""" + encoding = getattr(sys.stdout, "encoding", None) or "utf-8" + if encoding.lower() in ("utf-8", "utf8"): + return text + try: + text.encode(encoding) + return text + except UnicodeEncodeError: + return text.encode(encoding, errors="replace").decode(encoding, errors="replace") + + def print_color(text: str, color: str = Fore.BLUE): """Print colored text specified by color argument based on colorama - default color BLUE """ - print(f"{color}", f"{text}", Style.RESET_ALL) + print(f"{color}", f"{_safe_text_for_stdout(text)}", Style.RESET_ALL) def merge_requirements(requirement: Requirement, another_req: Requirement) -> Requirement: diff --git a/build_requirements.txt b/build_requirements.txt index 08e4834..0f2849f 100644 --- a/build_requirements.txt +++ b/build_requirements.txt @@ -4,6 +4,7 @@ requests~=2.31.0 packaging~=23.2 PyYAML~=6.0.1 colorama~=0.4.6 +tomli; python_version < "3.11" # ----- build process ----- boto3~=1.34.4 diff --git a/build_wheels.py b/build_wheels.py index 861537a..e75ede4 100644 --- a/build_wheels.py +++ b/build_wheels.py @@ -16,6 +16,11 @@ import requests +try: + import tomllib +except ImportError: # Python < 3.11 does not have tomllib built-in module + import tomli as tomllib + from colorama import Fore from packaging.requirements import InvalidRequirement from packaging.requirements import Requirement @@ -35,6 +40,8 @@ IDF_RESOURCES_URL = "https://raw.githubusercontent.com/espressif/esp-idf/" # URL for IDF master CMAKE version file IDF_MASTER_VERSION_URL = f"{IDF_RESOURCES_URL}master/tools/cmake/version.cmake" +# URL for esptool pyproject.toml file +ESPTOOL_PYPROJECT_URL = "https://raw.githubusercontent.com/espressif/esptool/master/pyproject.toml" # Minimal IDF release version to take requirements from (v{MAJOR}.{MINOR}) # Requirements from all release branches and master equal or above this will be considered @@ -151,6 +158,15 @@ def _download_branch_requirements(branch: str, idf_requirements_json: dict) -> L if 
check_response(res, f"Failed to download feature (requirement group) '{feature['name']}'"): requirements_txt += res.text.splitlines() print(f"Added ESP-IDF {feature['name']} requirements") + + # Download esptool requirements from pyproject.toml file + res = requests.get(ESPTOOL_PYPROJECT_URL, headers=AUTH_HEADER, timeout=10) + if check_response(res, "Failed to download esptool pyproject.toml file"): + pyproject_content = tomllib.loads(res.text) + esptool_deps = pyproject_content.get("project", {}).get("dependencies", []) + requirements_txt += [dep for dep in esptool_deps if dep not in requirements_txt] + print("Added esptool requirements") + return requirements_txt diff --git a/build_wheels_from_file.py b/build_wheels_from_file.py index c4fcb7c..8b23210 100644 --- a/build_wheels_from_file.py +++ b/build_wheels_from_file.py @@ -3,16 +3,65 @@ # # SPDX-License-Identifier: Apache-2.0 # +from __future__ import annotations + import argparse import os +import platform import subprocess import sys from colorama import Fore +from packaging.requirements import InvalidRequirement +from packaging.requirements import Requirement +from packaging.utils import canonicalize_name from _helper_functions import get_no_binary_args from _helper_functions import print_color +# Do not pass --no-binary for these in --force-interpreter-binary mode: +# - sdists whose legacy setup breaks under PEP 517 isolation (pkg_resources in isolated env). +# - sdists that fail to compile on CI when a usable wheel exists (e.g. ruamel.yaml.clib + clang). +# - PyObjC: all pyobjc / pyobjc-framework-* use pyobjc_setup.py + pkg_resources (macOS). +# - cryptography: abi3 wheels; avoid PyO3 max-Python / heavy Rust rebuilds in dependent jobs. +# - pydantic-core: maturin + jiter + PyO3 can fail from sdist on some CI combos (e.g. ARM64 3.9: +# jiter vs pyo3-ffi PyUnicode_* / extract API). Prefer compatible wheels from find-links or PyPI. 
+_FORCE_INTERPRETER_BINARY_SKIP_EXACT = frozenset( + { + canonicalize_name("cryptography"), + canonicalize_name("pydantic-core"), + canonicalize_name("protobuf"), + canonicalize_name("ruamel.yaml.clib"), + } +) + + +def _force_interpreter_skip_package(canonical_dist_name: str) -> bool: + if canonical_dist_name in _FORCE_INTERPRETER_BINARY_SKIP_EXACT: + return True + # PyObjC meta and framework bindings (pyobjc-framework-corebluetooth, etc.) + return canonical_dist_name == "pyobjc" or canonical_dist_name.startswith("pyobjc-") + + +def _force_interpreter_no_binary_args(requirement_line: str) -> list[str]: + """Return pip --no-binary for this package so pip cannot reuse e.g. cp311-abi3 wheels on 3.13.""" + line = requirement_line.strip() + if not line: + return [] + try: + req = Requirement(line) + except InvalidRequirement: + return [] + if _force_interpreter_skip_package(canonicalize_name(req.name)): + return [] + return ["--no-binary", req.name] + + +def _apply_force_interpreter_binary(cli_flag: bool) -> bool: + """Linux/macOS only: forcing sdist builds for cryptography etc. is unreliable on Windows CI.""" + return cli_flag and platform.system() != "Windows" + + parser = argparse.ArgumentParser(description="Process build arguments.") parser.add_argument( "requirements_path", @@ -36,6 +85,16 @@ action="store_true", help="CI exclude-tests mode: fail if all wheels succeed (expect some to fail, e.g. excluded packages)", ) +parser.add_argument( + "--force-interpreter-binary", + action="store_true", + help=( + "For each requirement, pass --no-binary so pip builds a wheel for the current " + "interpreter instead of reusing a compatible abi3 / older cpXY wheel from --find-links. " + "Ignored on Windows (source builds for e.g. cryptography are not used in CI there). " + "Some packages are always skipped (e.g. cryptography, pydantic-core, protobuf, PyObjC, ruamel.yaml.clib)." 
+ ), +) args = parser.parse_args() @@ -55,8 +114,16 @@ raise SystemExit(f"Python version dependent requirements directory or file not found ({e})") for requirement in requirements: + requirement = requirement.strip() + if not requirement or requirement.startswith("#"): + continue # Get no-binary args for packages that should be built from source no_binary_args = get_no_binary_args(requirement) + force_interpreter_args = ( + _force_interpreter_no_binary_args(requirement) + if _apply_force_interpreter_binary(args.force_interpreter_binary) + else [] + ) out = subprocess.run( [ @@ -64,13 +131,14 @@ "-m", "pip", "wheel", - f"{requirement}", + requirement, "--find-links", "downloaded_wheels", "--wheel-dir", "downloaded_wheels", ] - + no_binary_args, + + no_binary_args + + force_interpreter_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) @@ -100,6 +168,11 @@ for requirement in in_requirements: # Get no-binary args for packages that should be built from source no_binary_args = get_no_binary_args(requirement) + force_interpreter_args = ( + _force_interpreter_no_binary_args(requirement) + if _apply_force_interpreter_binary(args.force_interpreter_binary) + else [] + ) out = subprocess.run( [ @@ -113,7 +186,8 @@ "--wheel-dir", "downloaded_wheels", ] - + no_binary_args, + + no_binary_args + + force_interpreter_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) diff --git a/exclude_list.yaml b/exclude_list.yaml index f513fdb..a696df7 100644 --- a/exclude_list.yaml +++ b/exclude_list.yaml @@ -140,3 +140,9 @@ # https://pypi.org/project/mcp/ - package_name: 'mcp' python: ['==3.8', '==3.9'] + +# idf-component-manager v3.0.0 is not supported by Python < 3.10 +# https://pypi.org/project/idf-component-manager/3.0.0/ +- package_name: 'idf-component-manager' + version: '==3.0.0' + python: ['<3.10'] diff --git a/repair_wheels.py b/repair_wheels.py index 8afab4f..42cd492 100644 --- a/repair_wheels.py +++ b/repair_wheels.py @@ -14,6 +14,7 @@ import platform import subprocess
+import zipfile from pathlib import Path from typing import Union @@ -186,6 +187,13 @@ def main() -> None: skipped_count += 1 continue + # PEP 427: wheels are zip files; invalid magic usually means truncated/corrupt CI artifact + if not zipfile.is_zipfile(wheel): + print_color(" -> Deleting file (not a valid zip / wheel archive)", Fore.RED) + wheel.unlink() + deleted_count += 1 + continue + # Clean temp directory for old_wheel in temp_dir.glob("*.whl"): old_wheel.unlink() @@ -295,14 +303,21 @@ def main() -> None: # A repaired wheel was created successfully if repaired.name != wheel.name: wheel.unlink() # Remove original - repaired.rename(wheel.parent / repaired.name) + final_path = wheel.parent / repaired.name + repaired.rename(final_path) print_color(f" -> Replaced with repaired wheel: {repaired.name}", Fore.GREEN) else: # Name unchanged wheel.unlink() repaired.rename(wheel) + final_path = wheel print_color(f" -> Repaired successfully: {repaired.name}", Fore.GREEN) - repaired_count += 1 + if not zipfile.is_zipfile(final_path): + print_color(" -> Deleting repaired output (not a valid zip archive)", Fore.RED) + final_path.unlink() + deleted_count += 1 + else: + repaired_count += 1 elif result.returncode == 0: # No repaired wheel created, but command succeeded (already compatible) print_color(" -> Keeping original wheel (already compatible)", Fore.GREEN) diff --git a/test_wheels_install.py b/test_wheels_install.py index 6531d1f..3023384 100644 --- a/test_wheels_install.py +++ b/test_wheels_install.py @@ -8,6 +8,10 @@ This script finds and installs wheels compatible with the current Python version, verifying that wheel files are valid and platform-compatible. It also checks wheels against exclude_list.yaml and removes incompatible ones. + +Wheels are ZIP archives (PEP 427). 
pip opens them with the zipfile module; a +BadZipFile / "Bad magic number" error means the bytes on disk are not a valid +ZIP (truncated, corrupted, or not a wheel), not that ".whl" was mistaken for ".zip". """ from __future__ import annotations @@ -15,6 +19,7 @@ import re import subprocess import sys +import zipfile from pathlib import Path @@ -141,6 +146,23 @@ def is_compatibility_error(error_message: str) -> bool: return any(err in error_message for err in compatibility_errors) +def is_corrupt_wheel_archive_error(error_message: str) -> bool: + """True if pip failed because the file is not a readable ZIP / wheel archive.""" + markers = ( + "BadZipFile", + "Bad magic number for file header", + "has an invalid wheel", + "zipfile.BadZipFile", + ) + return any(m in error_message for m in markers) + + +def discard_corrupt_wheel(wheel_path: Path, note: str) -> None: + """Remove wheel from the test tree and print a single-line warning.""" + wheel_path.unlink(missing_ok=True) + print_color(f"-- {wheel_path.name} ({note})", Fore.YELLOW) + + def main() -> int: python_version_tag = get_python_version_tag() python_version = f"{sys.version_info.major}.{sys.version_info.minor}" @@ -187,12 +209,18 @@ def main() -> int: installed = 0 failed = 0 deleted = 0 + discarded_corrupt = 0 failed_wheels = [] deleted_wheels = [] print_color("---------- INSTALL WHEELS ----------") for wheel_path in wheels_to_install: + if not zipfile.is_zipfile(wheel_path): + discarded_corrupt += 1 + discard_corrupt_wheel(wheel_path, "invalid zip — not a valid wheel file (PEP 427)") + continue + success, error_message = install_wheel(wheel_path) if success: @@ -204,6 +232,11 @@ def main() -> int: deleted_wheels.append(wheel_path.name) wheel_path.unlink() print_color(f"-- {wheel_path.name} (compatibility constraint)", Fore.YELLOW) + elif is_corrupt_wheel_archive_error(error_message): + # Truncated/corrupt artifact or bad repair output; same handling as incompatible: + # drop from this test artifact so CI can 
continue (see module docstring). + discarded_corrupt += 1 + discard_corrupt_wheel(wheel_path, "invalid / corrupt zip (pip could not read wheel)") else: failed += 1 failed_wheels.append((wheel_path.name, error_message)) @@ -221,6 +254,11 @@ def main() -> int: print_color(f"Excluded {excluded} wheels (exclude_list.yaml)", Fore.YELLOW) if deleted > 0: print_color(f"Deleted {deleted} wheels (compatibility constraint)", Fore.YELLOW) + if discarded_corrupt > 0: + print_color( + f"Discarded {discarded_corrupt} wheels (invalid or corrupt zip archive)", + Fore.YELLOW, + ) if failed > 0: print_color(f"Failed {failed} wheels", Fore.RED)