Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 6 additions & 4 deletions .github/workflows/build-wheels-python-dependent.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ jobs:
runs-on: ${{ matrix.runner }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# PyO3 (cryptography, etc.): allow building against CPython newer than PyO3's declared max when using stable ABI
PYO3_USE_ABI3_FORWARD_COMPATIBILITY: "1"
strategy:
fail-fast: false
matrix:
Expand Down Expand Up @@ -148,7 +150,7 @@ jobs:
bash os_dependencies/linux_arm.sh
# Source Rust environment after installation
. \$HOME/.cargo/env
python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }}
python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }}
"

- name: Build Python dependent wheels - ARMv7 Legacy (in Docker)
Expand All @@ -171,7 +173,7 @@ jobs:
bash os_dependencies/linux_arm.sh
# Source Rust environment after installation
. \$HOME/.cargo/env
python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }}
python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }}
"

- name: Build Python dependent wheels - Linux/macOS
Expand All @@ -184,11 +186,11 @@ jobs:
export ARCHFLAGS="-arch x86_64"
fi

python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }}
python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }}

- name: Build Python dependent wheels for ${{ matrix.python-version }} - Windows
if: matrix.os == 'Windows'
run: python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }}
run: python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }}


- name: Upload artifacts
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/unit-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install packaging pyyaml colorama requests
python -m pip install -r build_requirements.txt

- name: Run unit tests
run: python -m unittest discover -s . -v
Expand Down
14 changes: 13 additions & 1 deletion _helper_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,11 +107,23 @@ def get_no_binary_args(requirement_name: str) -> list:
return []


def _safe_text_for_stdout(text: str) -> str:
"""Avoid UnicodeEncodeError when printing pip/tool output on Windows (e.g. cp1252 console)."""
encoding = getattr(sys.stdout, "encoding", None) or "utf-8"
if encoding.lower() in ("utf-8", "utf8"):
return text
try:
text.encode(encoding)
return text
except UnicodeEncodeError:
return text.encode(encoding, errors="replace").decode(encoding, errors="replace")


def print_color(text: str, color: str = Fore.BLUE):
    """Print *text* wrapped in the given colorama color code.

    - default color BLUE
    - the text is sanitized for the active stdout encoding first, so printing
      pip/tool output never raises UnicodeEncodeError on narrow consoles
    """
    printable = _safe_text_for_stdout(text)
    print(f"{color}", f"{printable}", Style.RESET_ALL)


def merge_requirements(requirement: Requirement, another_req: Requirement) -> Requirement:
Expand Down
1 change: 1 addition & 0 deletions build_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ requests~=2.31.0
packaging~=23.2
PyYAML~=6.0.1
colorama~=0.4.6
tomli; python_version < "3.11"
# ----- build process -----
boto3~=1.34.4

Expand Down
16 changes: 16 additions & 0 deletions build_wheels.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,11 @@

import requests

try:
import tomllib
except ImportError: # Python < 3.11 does not have the built-in tomllib module
import tomli as tomllib

from colorama import Fore
from packaging.requirements import InvalidRequirement
from packaging.requirements import Requirement
Expand All @@ -35,6 +40,8 @@
IDF_RESOURCES_URL = "https://raw.githubusercontent.com/espressif/esp-idf/"
# URL for IDF master CMAKE version file
IDF_MASTER_VERSION_URL = f"{IDF_RESOURCES_URL}master/tools/cmake/version.cmake"
# URL for esptool pyproject.toml file
ESPTOOL_PYPROJECT_URL = "https://raw.githubusercontent.com/espressif/esptool/master/pyproject.toml"

# Minimal IDF release version to take requirements from (v{MAJOR}.{MINOR})
# Requirements from all release branches and master equal or above this will be considered
Expand Down Expand Up @@ -151,6 +158,15 @@ def _download_branch_requirements(branch: str, idf_requirements_json: dict) -> L
if check_response(res, f"Failed to download feature (requirement group) '{feature['name']}'"):
requirements_txt += res.text.splitlines()
print(f"Added ESP-IDF {feature['name']} requirements")

# Download esptool requirements from pyproject.toml file
res = requests.get(ESPTOOL_PYPROJECT_URL, headers=AUTH_HEADER, timeout=10)
if check_response(res, "Failed to download esptool pyproject.toml file"):
pyproject_content = tomllib.loads(res.text)
esptool_deps = pyproject_content.get("project", {}).get("dependencies", [])
requirements_txt += [dep for dep in esptool_deps if dep not in requirements_txt]
print("Added esptool requirements")

return requirements_txt


Expand Down
80 changes: 77 additions & 3 deletions build_wheels_from_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,65 @@
#
# SPDX-License-Identifier: Apache-2.0
#
from __future__ import annotations

import argparse
import os
import platform
import subprocess
import sys

from colorama import Fore
from packaging.requirements import InvalidRequirement
from packaging.requirements import Requirement
from packaging.utils import canonicalize_name

from _helper_functions import get_no_binary_args
from _helper_functions import print_color

# Do not pass --no-binary for these in --force-interpreter-binary mode:
# - sdists whose legacy setup breaks under PEP 517 isolation (pkg_resources in isolated env).
# - sdists that fail to compile on CI when a usable wheel exists (e.g. ruamel.yaml.clib + clang).
# - PyObjC: all pyobjc / pyobjc-framework-* use pyobjc_setup.py + pkg_resources (macOS).
# - cryptography: abi3 wheels; avoid PyO3 max-Python / heavy Rust rebuilds in dependent jobs.
# - pydantic-core: maturin + jiter + PyO3 can fail from sdist on some CI combos (e.g. ARM64 3.9:
# jiter vs pyo3-ffi PyUnicode_* / extract API). Prefer compatible wheels from find-links or PyPI.
# Exact (canonicalized) distribution names excluded from forced source builds;
# rationale for each package is in the comment block above.
_FORCE_INTERPRETER_BINARY_SKIP_EXACT = frozenset(
    canonicalize_name(dist)
    for dist in (
        "cryptography",
        "pydantic-core",
        "protobuf",
        "ruamel.yaml.clib",
    )
)


def _force_interpreter_skip_package(canonical_dist_name: str) -> bool:
    """Return True when *canonical_dist_name* must keep using prebuilt wheels.

    Matches the explicit skip set plus the PyObjC meta package and every
    pyobjc-framework-* binding (e.g. pyobjc-framework-corebluetooth).
    """
    if canonical_dist_name == "pyobjc" or canonical_dist_name.startswith("pyobjc-"):
        return True
    return canonical_dist_name in _FORCE_INTERPRETER_BINARY_SKIP_EXACT


def _force_interpreter_no_binary_args(requirement_line: str) -> list[str]:
    """Return pip ``--no-binary <pkg>`` args for *requirement_line*.

    Forcing a source build prevents pip from reusing e.g. a cp311-abi3 wheel
    on Python 3.13. Blank lines, unparseable lines (comments, pip options)
    and packages on the skip list yield no extra arguments.
    """
    stripped = requirement_line.strip()
    if not stripped:
        return []
    try:
        parsed = Requirement(stripped)
    except InvalidRequirement:
        # Not a PEP 508 requirement — let pip handle the line untouched.
        return []
    if _force_interpreter_skip_package(canonicalize_name(parsed.name)):
        return []
    return ["--no-binary", parsed.name]


def _apply_force_interpreter_binary(cli_flag: bool) -> bool:
"""Linux/macOS only: forcing sdist builds for cryptography etc. is unreliable on Windows CI."""
return cli_flag and platform.system() != "Windows"


parser = argparse.ArgumentParser(description="Process build arguments.")
parser.add_argument(
"requirements_path",
Expand All @@ -36,6 +85,16 @@
action="store_true",
help="CI exclude-tests mode: fail if all wheels succeed (expect some to fail, e.g. excluded packages)",
)
parser.add_argument(
"--force-interpreter-binary",
action="store_true",
help=(
"For each requirement, pass --no-binary <pkg> so pip builds a wheel for the current "
"interpreter instead of reusing a compatible abi3 / older cpXY wheel from --find-links. "
"Ignored on Windows (source builds for e.g. cryptography are not used in CI there). "
"Some packages are always skipped (e.g. cryptography, pydantic-core, protobuf, PyObjC, ruamel.yaml.clib)."
),
)

args = parser.parse_args()

Expand All @@ -55,22 +114,31 @@
raise SystemExit(f"Python version dependent requirements directory or file not found ({e})")

for requirement in requirements:
requirement = requirement.strip()
if not requirement or requirement.startswith("#"):
continue
# Get no-binary args for packages that should be built from source
no_binary_args = get_no_binary_args(requirement)
force_interpreter_args = (
_force_interpreter_no_binary_args(requirement)
if _apply_force_interpreter_binary(args.force_interpreter_binary)
else []
)

out = subprocess.run(
[
f"{sys.executable}",
"-m",
"pip",
"wheel",
f"{requirement}",
requirement,
"--find-links",
"downloaded_wheels",
"--wheel-dir",
"downloaded_wheels",
]
+ no_binary_args,
+ no_binary_args
+ force_interpreter_args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
Expand Down Expand Up @@ -100,6 +168,11 @@
for requirement in in_requirements:
# Get no-binary args for packages that should be built from source
no_binary_args = get_no_binary_args(requirement)
force_interpreter_args = (
_force_interpreter_no_binary_args(requirement)
if _apply_force_interpreter_binary(args.force_interpreter_binary)
else []
)

out = subprocess.run(
[
Expand All @@ -113,7 +186,8 @@
"--wheel-dir",
"downloaded_wheels",
]
+ no_binary_args,
+ no_binary_args
+ force_interpreter_args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
Expand Down
6 changes: 6 additions & 0 deletions exclude_list.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -140,3 +140,9 @@
# https://pypi.org/project/mcp/
- package_name: 'mcp'
python: ['==3.8', '==3.9']

# idf-component-manager v3.0.0 is not supported by Python <= 3.10
# https://pypi.org/project/idf-component-manager/3.0.0/
- package_name: 'idf-component-manager'
version: '==3.0.0'
python: ['<3.10']
19 changes: 17 additions & 2 deletions repair_wheels.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

import platform
import subprocess
import zipfile

from pathlib import Path
from typing import Union
Expand Down Expand Up @@ -186,6 +187,13 @@ def main() -> None:
skipped_count += 1
continue

# PEP 427: wheels are zip files; invalid magic usually means truncated/corrupt CI artifact
if not zipfile.is_zipfile(wheel):
print_color(" -> Deleting file (not a valid zip / wheel archive)", Fore.RED)
wheel.unlink()
deleted_count += 1
continue

# Clean temp directory
for old_wheel in temp_dir.glob("*.whl"):
old_wheel.unlink()
Expand Down Expand Up @@ -295,14 +303,21 @@ def main() -> None:
# A repaired wheel was created successfully
if repaired.name != wheel.name:
wheel.unlink() # Remove original
repaired.rename(wheel.parent / repaired.name)
final_path = wheel.parent / repaired.name
repaired.rename(final_path)
print_color(f" -> Replaced with repaired wheel: {repaired.name}", Fore.GREEN)
else:
# Name unchanged
wheel.unlink()
repaired.rename(wheel)
final_path = wheel
print_color(f" -> Repaired successfully: {repaired.name}", Fore.GREEN)
repaired_count += 1
if not zipfile.is_zipfile(final_path):
print_color(" -> Deleting repaired output (not a valid zip archive)", Fore.RED)
final_path.unlink()
deleted_count += 1
else:
repaired_count += 1
elif result.returncode == 0:
# No repaired wheel created, but command succeeded (already compatible)
print_color(" -> Keeping original wheel (already compatible)", Fore.GREEN)
Expand Down
Loading