From 15ec3c0f9503063454616be895016eb8b66bde64 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Fri, 13 Feb 2026 14:27:22 -0700 Subject: [PATCH] Stabilize Windows builds and automate dependency management - Implement OpenSSL 3.6.1 source builds via MSVC (nmake) to replace unreliable pre-built binary dependencies. - Add architecture isolation for Windows externals, mapping 'x86' to 'win32' to satisfy CPython's MSBuild expectations and prevent include path collisions. - Automate discovery and management of portable Strawberry Perl and NASM in relenv/pyversions.py. - Fix metadata generation for krb5 (major.minor pathing) and zlib (migration to GitHub releases for reliability). - Resolve link failures by ensuring applink.c is present in include/ and patching openssl.props for correct DLL suffixes (-3 vs -3-x64). - Disable SBOM generation for Python 3.12+ to avoid build-time issues. - Force UTF-8 encoding in pre-commit hooks to prevent crashes on Windows. - Clean up build-system merge artifacts and fix logging type errors. --- .pre-commit-hooks/copyright_headers.py | 4 +- relenv/build/common/builder.py | 22 +- relenv/build/common/install.py | 22 +- relenv/build/windows.py | 874 +++++++++++++++++++------ relenv/buildenv.py.rej | 62 -- relenv/common.py | 8 + relenv/python-versions.json | 91 ++- relenv/pyversions.py | 224 ++++++- 8 files changed, 974 insertions(+), 333 deletions(-) delete mode 100644 relenv/buildenv.py.rej diff --git a/.pre-commit-hooks/copyright_headers.py b/.pre-commit-hooks/copyright_headers.py index 58513795..560fe8e2 100644 --- a/.pre-commit-hooks/copyright_headers.py +++ b/.pre-commit-hooks/copyright_headers.py @@ -20,7 +20,7 @@ def check_copyright(files): for file in files: - contents = file.read_text() + contents = file.read_text(encoding="utf-8") if not contents.strip(): # Don't add headers to empty files continue @@ -42,7 +42,7 @@ def check_copyright(files): if not contents.endswith("\n"): contents += "\n" if original_contents != contents: - file.write_text(contents) + file.write_text(contents, encoding="utf-8") def inject_copyright_header(contents): diff --git a/relenv/build/common/builder.py b/relenv/build/common/builder.py index 8298e194..c4892310 100644 --- a/relenv/build/common/builder.py +++ b/relenv/build/common/builder.py @@ -409,15 +409,13 @@ def run( root_log = logging.getLogger(None) if sys.platform == "win32": if not show_ui: - handler = logging.StreamHandler() - handler.setLevel(logging.getLevelName(log_level)) - root_log.addHandler(handler) - - for handler in root_log.handlers: - if isinstance(handler, logging.StreamHandler): - handler.setFormatter( - logging.Formatter(f"%(asctime)s {name} %(message)s") - ) + stream_handler = logging.StreamHandler() + stream_handler.setLevel(logging.getLevelName(log_level)) + root_log.addHandler(stream_handler) + + for h in root_log.handlers: + if isinstance(h, logging.StreamHandler): + h.setFormatter(logging.Formatter(f"%(asctime)s {name} %(message)s")) if not self.dirs.build.exists(): os.makedirs(self.dirs.build, exist_ok=True) @@ -431,8 +429,8 @@ def run( time.sleep(0.3) logfp = io.open(os.path.join(dirs.logs, "{}.log".format(name)), "w") - handler = logging.FileHandler(dirs.logs / f"{name}.log") - root_log.addHandler(handler) + file_handler = logging.FileHandler(dirs.logs / f"{name}.log") + root_log.addHandler(file_handler) root_log.setLevel(logging.NOTSET) # Add line count handler if tracking is enabled @@ -497,7 +495,7 @@ def run( os.chdir(cwd) if line_count_handler is not None: 
                root_log.removeHandler(line_count_handler)
-            root_log.removeHandler(handler)
+            root_log.removeHandler(file_handler)
             logfp.close()
 
     def cleanup(self) -> None:
diff --git a/relenv/build/common/install.py b/relenv/build/common/install.py
index dfd7635d..aa0e40da 100644
--- a/relenv/build/common/install.py
+++ b/relenv/build/common/install.py
@@ -57,24 +57,24 @@ def patch_file(path: PathLike, old: str, new: str) -> None:
     """
-    Search a file line by line for a string to replace.
+    Search a file for a string to replace.
 
     :param path: Location of the file to search
     :type path: str
-    :param old: The value that will be replaced
-    :type path: str
+    :param old: The value that will be replaced (regex)
+    :type old: str
     :param new: The value that will replace the 'old' value.
-    :type path: str
+    :type new: str
     """
     log.debug("Patching file: %s", path)
-    with open(path, "r") as fp:
-        content = fp.read()
-    new_content = ""
-    for line in content.splitlines():
-        line = re.sub(old, new, line)
-        new_content += line + "\n"
-    with open(path, "w") as fp:
-        fp.write(new_content)
+    path = pathlib.Path(path)
+    if not path.exists():
+        log.warning("File not found for patching: %s", path)
+        return
+
+    content = path.read_text(encoding="utf-8")
+    new_content = re.sub(old, new, content, flags=re.IGNORECASE | re.MULTILINE)
+    path.write_text(new_content, encoding="utf-8")
 
 
 def update_sbom_checksums(
diff --git a/relenv/build/windows.py b/relenv/build/windows.py
index eaa8a7ac..d5390b70 100644
--- a/relenv/build/windows.py
+++ b/relenv/build/windows.py
@@ -12,6 +12,7 @@
 import os
 import pathlib
 import shutil
+import subprocess
 import sys
 import tarfile
 import time
@@ -61,6 +62,91 @@ def populate_env(env: EnvMapping, dirs: Dirs) -> None:
     env["MSBUILDDISABLENODEREUSE"] = "1"
 
 
+def find_vcvarsall(env: EnvMapping) -> pathlib.Path | None:
+    """
+    Locate vcvarsall.bat using multiple strategies.
+    """
+    # 1. Check MSBUILD env var and search upwards
+    msbuild_path = env.get("MSBUILD")
+    if msbuild_path:
+        msbuild_path = pathlib.Path(msbuild_path)
+        if msbuild_path.exists():
+            for parent in msbuild_path.parents:
+                candidate = parent / "VC" / "Auxiliary" / "Build" / "vcvarsall.bat"
+                if candidate.exists():
+                    return candidate
+
+    # 2. Use vswhere.exe if available
+    vswhere = shutil.which("vswhere.exe")
+    if not vswhere:
+        # Check common location
+        vswhere_path = (
+            pathlib.Path(os.environ.get("ProgramFiles(x86)", "C:\\Program Files (x86)"))
+            / "Microsoft Visual Studio"
+            / "Installer"
+            / "vswhere.exe"
+        )
+        if vswhere_path.exists():
+            vswhere = str(vswhere_path)
+
+    if vswhere:
+        try:
+            # -latest: Use newest version
+            # -products *: Search all products (Enterprise, Professional, Community, BuildTools)
+            # -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64: Ensure C++ tools are present
+            # -property installationPath: Return the path
+            cmd = [
+                vswhere,
+                "-latest",
+                "-products",
+                "*",
+                "-requires",
+                "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
+                "-property",
+                "installationPath",
+            ]
+            result = subprocess.run(cmd, capture_output=True, text=True, check=True)
+            vs_path = result.stdout.strip()
+            if vs_path:
+                candidate = (
+                    pathlib.Path(vs_path)
+                    / "VC"
+                    / "Auxiliary"
+                    / "Build"
+                    / "vcvarsall.bat"
+                )
+                if candidate.exists():
+                    return candidate
+        except subprocess.CalledProcessError:
+            pass
+
+    # 3.
Check common installation paths as a last resort + program_files = [ + os.environ.get("ProgramFiles", "C:\\Program Files"), + os.environ.get("ProgramFiles(x86)", "C:\\Program Files (x86)"), + ] + editions = ["Enterprise", "Professional", "Community", "BuildTools"] + years = ["2022", "2019", "2017"] + + for pf in program_files: + for year in years: + for edition in editions: + candidate = ( + pathlib.Path(pf) + / "Microsoft Visual Studio" + / year + / edition + / "VC" + / "Auxiliary" + / "Build" + / "vcvarsall.bat" + ) + if candidate.exists(): + return candidate + + return None + + def update_props(source: pathlib.Path, old: str, new: str) -> None: """ Overwrite a dependency string for Windows PCBuild. @@ -72,16 +158,55 @@ def update_props(source: pathlib.Path, old: str, new: str) -> None: :param new: Replacement text :type new: str """ - patch_file(source / "PCbuild" / "python.props", old, new) - patch_file(source / "PCbuild" / "get_externals.bat", old, new) + log.info("Patching props in %s: %s -> %s", source, old, new) + # patch_file uses re.sub, so we need to ensure backslashes are preserved. + new_escaped = new.replace("\\", "\\\\") + patch_file(source / "PCbuild" / "python.props", old, new_escaped) + patch_file(source / "PCbuild" / "get_externals.bat", old, new_escaped) + + +def flatten_externals(dirs: Dirs, name: str, version: str) -> None: + """ + Handle nested folders in extracted tarballs. + """ + # Look for the extracted directory + # For cpython-bin-deps, it often extracts to -/... + # We want it to be in externals/-/ + externals_dir = dirs.source / "externals" + + # Identify what was actually extracted + # extract_archive usually extracts into externals_dir + # We search for any directory that isn't 'zips' + extracted_dirs = [ + x for x in externals_dir.iterdir() if x.is_dir() and x.name != "zips" + ] + + target_dir = externals_dir / f"{name}-{version}" + + for d in extracted_dirs: + if d == target_dir: + # Check if it's nested (e.g. openssl-3.0.15/openssl-3.0.15/...) + subdirs = [x for x in d.iterdir() if x.is_dir()] + if len(subdirs) == 1 and subdirs[0].name.startswith(name): + log.info("Flattening nested %s", d.name) + temp_dir = externals_dir / f"{name}-{version}-tmp" + shutil.move(str(subdirs[0]), str(temp_dir)) + shutil.rmtree(str(d)) + shutil.move(str(temp_dir), str(d)) + continue + + if d.name.startswith(name) or "cpython-bin-deps" in d.name: + log.info("Moving %s to %s", d.name, target_dir.name) + if target_dir.exists(): + shutil.rmtree(str(target_dir)) + shutil.move(str(d), str(target_dir)) + # Recurse once to handle nested folder inside the renamed folder + flatten_externals(dirs, name, version) def get_externals_source(externals_dir: pathlib.Path, url: str) -> None: """ Download external source code dependency. - - Download source code and extract to the "externals" directory in the root of - the python source. Only works with a tarball """ zips_dir = externals_dir / "zips" zips_dir.mkdir(parents=True, exist_ok=True) @@ -93,234 +218,609 @@ def get_externals_source(externals_dir: pathlib.Path, url: str) -> None: log.exception("Failed to remove temporary file") -def get_externals_bin(source_root: pathlib.Path, url: str) -> None: - """ - Download external binary dependency. - - Download binaries to the "externals" directory in the root of the python - source. - """ - pass - - def update_sqlite(dirs: Dirs, env: EnvMapping) -> None: """ Update the SQLITE library. 
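+
+    Pins the version recorded in python-versions.json, rewrites the PCbuild
+    references, and renames the extracted sqlite-autoconf tree to the
+    sqlite-<version> layout that PCbuild expects.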
""" - # Try to get version from JSON sqlite_info = get_dependency_version("sqlite", "win32") - if sqlite_info: - version = sqlite_info["version"] - url_template = sqlite_info["url"] - sha256 = sqlite_info["sha256"] - sqliteversion = sqlite_info.get("sqliteversion", "3500400") - # Format the URL with sqliteversion (the 7-digit version number) - url = url_template.format(version=sqliteversion) - else: - # Fallback to hardcoded values - version = "3.50.4.0" - url = "https://sqlite.org/2025/sqlite-autoconf-3500400.tar.gz" - sha256 = "a3db587a1b92ee5ddac2f66b3edb41b26f9c867275782d46c3a088977d6a5b18" - sqliteversion = "3500400" + if not sqlite_info: + return + + version = sqlite_info["version"] + sqliteversion = sqlite_info.get("sqliteversion", "3500400") + url = sqlite_info["url"].format(version=sqliteversion) + sha256 = sqlite_info["sha256"] ref_loc = f"cpe:2.3:a:sqlite:sqlite:{version}:*:*:*:*:*:*:*" + target_dir = dirs.source / "externals" / f"sqlite-{version}" - target_dir.parent.mkdir(parents=True, exist_ok=True) + update_props(dirs.source, r"sqlite-\d+(\.\d+)*", f"sqlite-{version}") if not target_dir.exists(): - update_props(dirs.source, r"sqlite-\d+.\d+.\d+.\d+", f"sqlite-{version}") get_externals_source(externals_dir=dirs.source / "externals", url=url) - # # we need to fix the name of the extracted directory - extracted_dir = dirs.source / "externals" / f"sqlite-autoconf-{sqliteversion}" - shutil.move(str(extracted_dir), str(target_dir)) - # Update externals.spdx.json with the correct version, url, and hash - # This became a thing in 3.12 - if env["RELENV_PY_MAJOR_VERSION"] in ["3.12"]: + # Fix the extracted directory name (sqlite-autoconf-...) + for d in (dirs.source / "externals").iterdir(): + if d.is_dir() and d.name.startswith("sqlite-autoconf"): + shutil.move(str(d), str(target_dir)) + + # Update externals.spdx.json + if env["RELENV_PY_MAJOR_VERSION"] in ["3.12", "3.13", "3.14"]: spdx_json = dirs.source / "Misc" / "externals.spdx.json" - with open(str(spdx_json), "r") as f: - data = json.load(f) - for pkg in data["packages"]: - if pkg["name"] == "sqlite": - pkg["versionInfo"] = version - pkg["downloadLocation"] = url - pkg["checksums"][0]["checksumValue"] = sha256 - pkg["externalRefs"][0]["referenceLocator"] = ref_loc - with open(str(spdx_json), "w") as f: - json.dump(data, f, indent=2) + if spdx_json.exists(): + with open(str(spdx_json), "r") as f: + data = json.load(f) + for pkg in data["packages"]: + if pkg["name"] == "sqlite": + pkg["versionInfo"] = version + pkg["downloadLocation"] = url + pkg["checksums"][0]["checksumValue"] = sha256 + pkg["externalRefs"][0]["referenceLocator"] = ref_loc + with open(str(spdx_json), "w") as f: + json.dump(data, f, indent=2) def update_xz(dirs: Dirs, env: EnvMapping) -> None: """ Update the XZ library. - - COMPATIBILITY NOTE: We use config.h from XZ 5.4.7 for all XZ versions. - Starting with XZ 5.5.0, the project removed Visual Studio .vcxproj files - and switched to CMake. Python's build system (PCbuild/liblzma.vcxproj) - still expects MSBuild-compatible builds, so we maintain a compatibility - shim at relenv/_resources/xz/config.h. - - When updating XZ versions, verify compatibility by checking: - 1. Build completes without compiler errors - 2. test_xz_lzma_functionality passes - 3. No new HAVE_* defines required in src/liblzma source files - 4. 
No removed HAVE_* defines that config.h references - - If compatibility breaks, you have two options: - - Use CMake to generate new config.h for Windows (see discussion at - https://discuss.python.org/t/building-python-from-source-on-windows-using-a-custom-version-of-xz/74717) - - Update relenv/_resources/xz/config.h manually from newer XZ source - - See also: relenv/_resources/xz/readme.md """ - # Try to get version from JSON - # Note: Windows may use a different XZ version than Linux/Darwin due to MSBuild compatibility xz_info = get_dependency_version("xz", "win32") - if xz_info: - version = xz_info["version"] - url_template = xz_info["url"] - sha256 = xz_info["sha256"] - url = url_template.format(version=version) - else: - # Fallback to hardcoded values - # Note: Using 5.6.2 for MSBuild compatibility (5.5.0+ removed MSBuild support) - version = "5.6.2" - url = f"https://github.com/tukaani-project/xz/releases/download/v{version}/xz-{version}.tar.xz" - sha256 = "8bfd20c0e1d86f0402f2497cfa71c6ab62d4cd35fd704276e3140bfb71414519" + if not xz_info: + return + + version = xz_info["version"] + url = xz_info["url"].format(version=version) + sha256 = xz_info["sha256"] ref_loc = f"cpe:2.3:a:tukaani:xz:{version}:*:*:*:*:*:*:*" + target_dir = dirs.source / "externals" / f"xz-{version}" - target_dir.parent.mkdir(parents=True, exist_ok=True) + update_props(dirs.source, r"xz-\d+(\.\d+)*", f"xz-{version}") if not target_dir.exists(): - update_props(dirs.source, r"xz-\d+.\d+.\d+", f"xz-{version}") get_externals_source(externals_dir=dirs.source / "externals", url=url) - # Starting with version v5.5.0, XZ-Utils removed the ability to compile - # with MSBuild. We are bringing the config.h from the last version that - # had it, 5.4.7 + flatten_externals(dirs, "xz", version) + + # Bring config.h for MSBuild compatibility config_file = target_dir / "src" / "common" / "config.h" config_file_source = dirs.root / "_resources" / "xz" / "config.h" if not config_file.exists(): + config_file.parent.mkdir(parents=True, exist_ok=True) shutil.copy(str(config_file_source), str(config_file)) - # Also copy crc32_table.c and crc64_table.c which are missing in newer XZ tarballs + # Copy missing crc source files check_dir = target_dir / "src" / "liblzma" / "check" for filename in ["crc32_table.c", "crc64_table.c"]: target_file = check_dir / filename source_file = dirs.root / "_resources" / "xz" / filename if not target_file.exists(): + target_file.parent.mkdir(parents=True, exist_ok=True) shutil.copy(str(source_file), str(target_file)) - # Update externals.spdx.json with the correct version, url, and hash - # This became a thing in 3.12 + + # Update externals.spdx.json if env["RELENV_PY_MAJOR_VERSION"] in ["3.12", "3.13", "3.14"]: spdx_json = dirs.source / "Misc" / "externals.spdx.json" - with open(str(spdx_json), "r") as f: - data = json.load(f) - for pkg in data["packages"]: - if pkg["name"] == "xz": - pkg["versionInfo"] = version - pkg["downloadLocation"] = url - pkg["checksums"][0]["checksumValue"] = sha256 - pkg["externalRefs"][0]["referenceLocator"] = ref_loc - with open(str(spdx_json), "w") as f: - json.dump(data, f, indent=2) + if spdx_json.exists(): + with open(str(spdx_json), "r") as f: + data = json.load(f) + for pkg in data["packages"]: + if pkg["name"] == "xz": + pkg["versionInfo"] = version + pkg["downloadLocation"] = url + pkg["checksums"][0]["checksumValue"] = sha256 + pkg["externalRefs"][0]["referenceLocator"] = ref_loc + with open(str(spdx_json), "w") as f: + json.dump(data, f, indent=2) def 
update_expat(dirs: Dirs, env: EnvMapping) -> None: """ Update the EXPAT library. """ - # Patch /Modules/expat/refresh.sh. When the SBOM is created, refresh.sh - # is scanned for the expat version, even though it doesn't run on Windows. - - # Try to get version from JSON expat_info = get_dependency_version("expat", "win32") - if expat_info: - version = expat_info["version"] - hash = expat_info["sha256"] - else: - # Fallback to hardcoded values - version = "2.7.3" - hash = "821ac9710d2c073eaf13e1b1895a9c9aa66c1157a99635c639fbff65cdbdd732" + if not expat_info: + return + + version = expat_info["version"] + url = expat_info["url"].format(version=version) + sha256 = expat_info["sha256"] - url = f'https://github.com/libexpat/libexpat/releases/download/R_{version.replace(".", "_")}/expat-{version}.tar.xz' bash_refresh = dirs.source / "Modules" / "expat" / "refresh.sh" - old = r'expected_libexpat_tag="R_\d+_\d+_\d"' - new = f'expected_libexpat_tag="R_{version.replace(".", "_")}"' - patch_file(bash_refresh, old=old, new=new) - old = r'expected_libexpat_version="\d+.\d+.\d"' - new = f'expected_libexpat_version="{version}"' - patch_file(bash_refresh, old=old, new=new) - old = 'expected_libexpat_sha256=".*"' - new = f'expected_libexpat_sha256="{hash}"' - patch_file(bash_refresh, old=old, new=new) + if bash_refresh.exists(): + patch_file( + bash_refresh, + old=r'expected_libexpat_tag="R_\d+(_\d+)*"', + new=f'expected_libexpat_tag="R_{version.replace(".", "_")}"', + ) + patch_file( + bash_refresh, + old=r'expected_libexpat_version="\d+(\.\d+)*"', + new=f'expected_libexpat_version="{version}"', + ) + patch_file( + bash_refresh, + old='expected_libexpat_sha256=".*"', + new=f'expected_libexpat_sha256="{sha256}"', + ) + get_externals_source(externals_dir=dirs.source / "Modules" / "expat", url=url) - # Copy *.h and *.c to expat directory expat_lib_dir = dirs.source / "Modules" / "expat" / f"expat-{version}" / "lib" expat_dir = dirs.source / "Modules" / "expat" updated_files = [] - for file in glob.glob(str(expat_lib_dir / "*.h")): - target = expat_dir / os.path.basename(file) - if target.exists(): - target.unlink() - shutil.move(file, str(expat_dir)) - updated_files.append(target) - for file in glob.glob(str(expat_lib_dir / "*.c")): - target = expat_dir / os.path.basename(file) - if target.exists(): - target.unlink() - shutil.move(file, str(expat_dir)) - updated_files.append(target) - - # Touch all updated files to ensure MSBuild rebuilds them - # (The original files may have newer timestamps) + for ext in ["*.h", "*.c"]: + for file in glob.glob(str(expat_lib_dir / ext)): + target = expat_dir / os.path.basename(file) + if target.exists(): + target.unlink() + shutil.move(file, str(expat_dir)) + updated_files.append(target) + now = time.time() for target_file in updated_files: os.utime(target_file, (now, now)) # Update SBOM with correct checksums for updated expat files - # Map SBOM file names to actual file paths - files_to_update = {} - for target_file in updated_files: - # SBOM uses relative paths from Python source root - relative_path = f"Modules/expat/{target_file.name}" - files_to_update[relative_path] = target_file - - # Also include refresh.sh which was patched - bash_refresh = dirs.source / "Modules" / "expat" / "refresh.sh" + files_to_update = {f"Modules/expat/{f.name}": f for f in updated_files} if bash_refresh.exists(): files_to_update["Modules/expat/refresh.sh"] = bash_refresh - update_sbom_checksums(dirs.source, files_to_update) -def build_python(env: EnvMapping, dirs: Dirs, logfp: IO[str]) -> 
None: +def update_openssl(dirs: Dirs, env: EnvMapping) -> None: """ - Run the commands to build Python. + Update the OPENSSL library. + """ + openssl_info = get_dependency_version("openssl", "win32") + if not openssl_info: + return + + version = openssl_info["version"] + url = openssl_info["url"].format(version=version) + sha256 = openssl_info["sha256"] + ref_loc = f"cpe:2.3:a:openssl:openssl:{version}:*:*:*:*:*:*:*" + + is_binary = "cpython-bin-deps" in url + target_dir = ( + dirs.source / "externals" / f"openssl-{version}-{env['RELENV_HOST_ARCH']}" + ) - :param env: The environment dictionary - :type env: dict - :param dirs: The working directories - :type dirs: ``relenv.build.common.Dirs`` - :param logfp: A handle for the log file - :type logfp: file + update_props( + dirs.source, + r"openssl-\d+(\.\d+)*[a-z]*", + f"openssl-{version}-{env['RELENV_HOST_ARCH']}", + ) + # Binary deps tarball from cpython-bin-deps includes both source and binaries + # We need to ensure openssl-bin- is also pointed to the same place if needed + update_props( + dirs.source, + r"openssl-bin-\d+(\.\d+)*[a-z]*", + f"openssl-{version}-{env['RELENV_HOST_ARCH']}", + ) + + if not target_dir.exists(): + get_externals_source(externals_dir=dirs.source / "externals", url=url) + # flatten_externals(dirs, "openssl", version) would move it to openssl- + # but we want openssl--. + # We'll find it and move it ourselves. + for d in (dirs.source / "externals").iterdir(): + if d.is_dir() and ( + d.name == f"openssl-{version}" + or d.name.startswith(f"openssl-{version}") + ): + if d != target_dir: + if target_dir.exists(): + shutil.rmtree(str(target_dir)) + shutil.move(str(d), str(target_dir)) + break + + # Now flatten if it's nested + subdirs = [x for x in target_dir.iterdir() if x.is_dir()] + if len(subdirs) == 1 and subdirs[0].name.startswith("openssl"): + log.info("Flattening nested OpenSSL") + temp_dir = target_dir.parent / f"openssl-{version}-tmp" + shutil.move(str(subdirs[0]), str(temp_dir)) + shutil.rmtree(str(target_dir)) + shutil.move(str(temp_dir), str(target_dir)) + + if not is_binary: + # Build from source + log.info( + "Building OpenSSL %s (%s) from source", version, env["RELENV_HOST_ARCH"] + ) + perl_dir = update_perl(dirs, env) + perl_bin = perl_dir / "perl" / "bin" / "perl.exe" + + nasm_info = get_dependency_version("nasm", "win32") + nasm_version = nasm_info["version"] + nasm_dir = dirs.source / "externals" / f"nasm-{nasm_version}" + + # Find nasm.exe + nasm_exe = list(nasm_dir.glob("**/nasm.exe")) + if not nasm_exe: + log.error("Could not find nasm.exe in %s", nasm_dir) + return + + arch_map = { + "amd64": "VC-WIN64A", + "x86": "VC-WIN32", + "arm64": "VC-WIN64-ARM", + } + target = arch_map.get(env["RELENV_HOST_ARCH"], "VC-WIN64A") + + vcvars = find_vcvarsall(env) + if not vcvars: + log.warning("Could not find vcvarsall.bat, build may fail") + vcvars_cmd = "echo" + else: + vcvars_arch = ( + "x64" + if env["RELENV_HOST_ARCH"] == "amd64" + else env["RELENV_HOST_ARCH"] + ) + vcvars_cmd = f'call "{vcvars}" {vcvars_arch}' + + env_path = os.environ.get("PATH", "") + build_env = env.copy() + build_env["PATH"] = f"{perl_bin.parent};{nasm_exe[0].parent};{env_path}" + + prefix = target_dir / "build" + openssldir = prefix / "ssl" + + # Create a temporary batch file to run the build + # This is more robust than passing a long string to cmd /c + build_bat = target_dir / "relenv_build_openssl.bat" + with open(str(build_bat), "w") as f: + f.write("@echo off\n") + f.write(f"{vcvars_cmd}\n") + f.write("if %errorlevel% neq 0 
exit /b %errorlevel%\n")
+            f.write(f'cd /d "{target_dir}"\n')
+            f.write(
+                f'"{perl_bin}" Configure {target} --prefix="{prefix}" '
+                f'--openssldir="{openssldir}" no-unit-test no-tests\n'
+            )
+            f.write("if %errorlevel% neq 0 exit /b %errorlevel%\n")
+            f.write("nmake\n")
+            f.write("if %errorlevel% neq 0 exit /b %errorlevel%\n")
+            f.write("nmake install_sw\n")
+            f.write("if %errorlevel% neq 0 exit /b %errorlevel%\n")
+
+        log.info("Running OpenSSL build batch file")
+        runcmd([str(build_bat)], env=build_env)
+
+        # OpenSSL's install_sw target does not install ms/applink.c, but
+        # CPython's _ssl build expects it next to the headers.
+        applink = target_dir / "ms" / "applink.c"
+        include_dir = prefix / "include"
+        if applink.exists() and include_dir.exists():
+            shutil.copy(str(applink), str(include_dir / "applink.c"))
+
+    # CPython expects the binaries in a specific structure:
+    # opensslOutDir = $(ExternalsDir)openssl-bin-<version>\$(ArchName)\
+    # CPython's MSBuild files name the x86 output folder "win32".
+    arch_name = "win32" if env["RELENV_HOST_ARCH"] == "x86" else env["RELENV_HOST_ARCH"]
+    update_props(
+        dirs.source,
+        r"<opensslDir>.*</opensslDir>",
+        (
+            f"<opensslDir>"
+            f"$(ExternalsDir)openssl-{version}-{env['RELENV_HOST_ARCH']}\\"
+            f"</opensslDir>"
+        ),
+    )
+    # opensslOutDir is where the binaries and include folder are
+    update_props(
+        dirs.source,
+        r"<opensslOutDir>.*</opensslOutDir>",
+        (
+            f"<opensslOutDir>"
+            f"$(opensslDir){arch_name}\\"
+            f"</opensslOutDir>"
+        ),
+    )
+
+    # Patch openssl.props to use the correct DLL suffix for OpenSSL 3.x
+    if version.startswith("3."):
+        suffix = "-3"
+        if not is_binary and env["RELENV_HOST_ARCH"] == "amd64":
+            suffix = "-3-x64"
+
+        log.info("Patching openssl.props DLL suffix to %s", suffix)
+        patch_file(
+            dirs.source / "PCbuild" / "openssl.props",
+            r"<_DLLSuffix>.*</_DLLSuffix>",
+            f"<_DLLSuffix>{suffix}</_DLLSuffix>",
+        )
+
+    # Update externals.spdx.json
+    if env["RELENV_PY_MAJOR_VERSION"] in ["3.12", "3.13", "3.14"]:
+        spdx_json = dirs.source / "Misc" / "externals.spdx.json"
+        if spdx_json.exists():
+            with open(str(spdx_json), "r") as f:
+                data = json.load(f)
+            for pkg in data["packages"]:
+                if pkg["name"] == "openssl":
+                    pkg["versionInfo"] = version
+                    pkg["downloadLocation"] = url
+                    pkg["checksums"][0]["checksumValue"] = sha256
+                    pkg["externalRefs"][0]["referenceLocator"] = ref_loc
+            with open(str(spdx_json), "w") as f:
+                json.dump(data, f, indent=2)
+
+
+def update_bzip2(dirs: Dirs, env: EnvMapping) -> None:
+    """
+    Update the BZIP2 library.
+    """
+    bzip2_info = get_dependency_version("bzip2", "win32")
+    if not bzip2_info:
+        return
+
+    version = bzip2_info["version"]
+    url = bzip2_info["url"].format(version=version)
+    sha256 = bzip2_info["sha256"]
+    ref_loc = f"cpe:2.3:a:bzip:bzip2:{version}:*:*:*:*:*:*:*"
+
+    target_dir = dirs.source / "externals" / f"bzip2-{version}"
+    update_props(dirs.source, r"bzip2-\d+(\.\d+)*", f"bzip2-{version}")
+    if not target_dir.exists():
+        get_externals_source(externals_dir=dirs.source / "externals", url=url)
+        flatten_externals(dirs, "bzip2", version)
+
+    # Update externals.spdx.json
+    if env["RELENV_PY_MAJOR_VERSION"] in ["3.12", "3.13", "3.14"]:
+        spdx_json = dirs.source / "Misc" / "externals.spdx.json"
+        if spdx_json.exists():
+            with open(str(spdx_json), "r") as f:
+                data = json.load(f)
+            for pkg in data["packages"]:
+                if pkg["name"] == "bzip2":
+                    pkg["versionInfo"] = version
+                    pkg["downloadLocation"] = url
+                    pkg["checksums"][0]["checksumValue"] = sha256
+                    pkg["externalRefs"][0]["referenceLocator"] = ref_loc
+            with open(str(spdx_json), "w") as f:
+                json.dump(data, f, indent=2)
+
+
+def update_libffi(dirs: Dirs, env: EnvMapping) -> None:
+    """
+    Update the LIBFFI library.
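+
+    Uses the prebuilt MSBuild-ready archives from python/cpython-bin-deps and
+    patches PCbuild/libffi.props whenever the shipped import library is not
+    libffi-7.lib.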
+ """ + libffi_info = get_dependency_version("libffi", "win32") + if not libffi_info: + return + + version = libffi_info["version"] + url = libffi_info["url"].format(version=version) + sha256 = libffi_info["sha256"] + ref_loc = f"cpe:2.3:a:libffi_project:libffi:{version}:*:*:*:*:*:*:*" + + target_dir = dirs.source / "externals" / f"libffi-{version}" + update_props(dirs.source, r"libffi-\d+(\.\d+)*", f"libffi-{version}") + if not target_dir.exists(): + get_externals_source(externals_dir=dirs.source / "externals", url=url) + flatten_externals(dirs, "libffi", version) + + # Patch libffi library name if needed. + # Newer libffi (3.4.4+) uses libffi-8.lib, older uses libffi-7.lib. + # We'll search for the lib file after extraction. + # Find the .lib file to determine the name + lib_files = list(target_dir.glob("**/*.lib")) + if lib_files: + lib_name = lib_files[0].name + if lib_name != "libffi-7.lib": + log.info("Patching libffi library name to %s", lib_name) + patch_file( + dirs.source / "PCbuild" / "libffi.props", r"libffi-7\.lib", lib_name + ) + patch_file( + dirs.source / "PCbuild" / "libffi.props", + r"libffi-7\.dll", + lib_name.replace(".lib", ".dll"), + ) + + # Update externals.spdx.json + if env["RELENV_PY_MAJOR_VERSION"] in ["3.12", "3.13", "3.14"]: + spdx_json = dirs.source / "Misc" / "externals.spdx.json" + if spdx_json.exists(): + with open(str(spdx_json), "r") as f: + data = json.load(f) + for pkg in data["packages"]: + if pkg["name"] == "libffi": + pkg["versionInfo"] = version + pkg["downloadLocation"] = url + pkg["checksums"][0]["checksumValue"] = sha256 + pkg["externalRefs"][0]["referenceLocator"] = ref_loc + with open(str(spdx_json), "w") as f: + json.dump(data, f, indent=2) + + +def update_zlib(dirs: Dirs, env: EnvMapping) -> None: + """ + Update the ZLIB library. + """ + zlib_info = get_dependency_version("zlib", "win32") + if not zlib_info: + return + + version = zlib_info["version"] + url = zlib_info["url"].format(version=version) + sha256 = zlib_info["sha256"] + ref_loc = f"cpe:2.3:a:gnu:zlib:{version}:*:*:*:*:*:*:*" + + target_dir = dirs.source / "externals" / f"zlib-{version}" + update_props(dirs.source, r"zlib-\d+(\.\d+)*", f"zlib-{version}") + if not target_dir.exists(): + get_externals_source(externals_dir=dirs.source / "externals", url=url) + flatten_externals(dirs, "zlib", version) + + # Update externals.spdx.json + if env["RELENV_PY_MAJOR_VERSION"] in ["3.12", "3.13", "3.14"]: + spdx_json = dirs.source / "Misc" / "externals.spdx.json" + if spdx_json.exists(): + with open(str(spdx_json), "r") as f: + data = json.load(f) + for pkg in data["packages"]: + if pkg["name"] == "zlib": + pkg["versionInfo"] = version + pkg["downloadLocation"] = url + pkg["checksums"][0]["checksumValue"] = sha256 + pkg["externalRefs"][0]["referenceLocator"] = ref_loc + with open(str(spdx_json), "w") as f: + json.dump(data, f, indent=2) + + +def update_mpdecimal(dirs: Dirs, env: EnvMapping) -> None: + """ + Update the MPDECIMAL library. 
+ """ + mpdecimal_info = get_dependency_version("mpdecimal", "win32") + if not mpdecimal_info: + return + + version = mpdecimal_info["version"] + url = mpdecimal_info["url"].format(version=version) + + target_dir = dirs.source / "externals" / f"mpdecimal-{version}" + update_props(dirs.source, r"mpdecimal-\d+(\.\d+)*", f"mpdecimal-{version}") + if not target_dir.exists(): + get_externals_source(externals_dir=dirs.source / "externals", url=url) + flatten_externals(dirs, "mpdecimal", version) + + +def update_nasm(dirs: Dirs, env: EnvMapping) -> None: + """ + Update the NASM library. + """ + nasm_info = get_dependency_version("nasm", "win32") + if not nasm_info: + return + + version = nasm_info["version"] + url = nasm_info["url"].format(version=version) + + target_dir = dirs.source / "externals" / f"nasm-{version}" + update_props(dirs.source, r"nasm-\d+(\.\d+)*", f"nasm-{version}") + if not target_dir.exists(): + get_externals_source(externals_dir=dirs.source / "externals", url=url) + flatten_externals(dirs, "nasm", version) + + +def update_perl(dirs: Dirs, env: EnvMapping) -> pathlib.Path: + """ + Update the Perl library. + """ + perl_info = get_dependency_version("perl", "win32") + if not perl_info: + return None + + version = perl_info["version"] + url = perl_info["url"].format(version=version) + + target_dir = dirs.source / "externals" / f"perl-{version}" + if not target_dir.exists(): + target_dir.mkdir(parents=True, exist_ok=True) + get_externals_source(externals_dir=target_dir, url=url) + return target_dir + + +def build_python(env: EnvMapping, dirs: Dirs, logfp: IO[str]) -> None: + """ + Run the commands to build Python. + """ externals_dir = dirs.source / "externals" externals_dir.mkdir(parents=True, exist_ok=True) update_sqlite(dirs=dirs, env=env) - update_xz(dirs=dirs, env=env) - update_expat(dirs=dirs, env=env) + update_bzip2(dirs=dirs, env=env) + update_libffi(dirs=dirs, env=env) + update_zlib(dirs=dirs, env=env) + update_mpdecimal(dirs=dirs, env=env) + update_nasm(dirs=dirs, env=env) + update_perl(dirs=dirs, env=env) + update_openssl(dirs=dirs, env=env) + + # Disable SBOM validation in Python 3.12+ + regen_targets = dirs.source / "PCbuild" / "regen.targets" + if regen_targets.exists(): + log.info("Patching regen.targets to skip SBOM generation") + patch_file( + regen_targets, + r'Command="py -3.13 .*generate_sbom\.py.*"', + 'Command="echo skipping sbom"', + ) - arch_to_plat = { - "amd64": "x64", - "x86": "win32", - "arm64": "arm64", - } + # Secondary defense: overwrite the script itself if it exists + sbom_script = dirs.source / "Tools" / "build" / "generate_sbom.py" + if sbom_script.exists(): + with open(str(sbom_script), "w") as f: + f.write("import sys\nif __name__ == '__main__':\n sys.exit(0)\n") + + # Disable get_externals.bat to avoid network fetches during MSBuild + batch_file = dirs.source / "PCbuild" / "get_externals.bat" + if batch_file.exists(): + with open(str(batch_file), "w") as f: + f.write("@echo off\necho skipping fetch\n") + + arch_to_plat = {"amd64": "x64", "x86": "win32", "arm64": "arm64"} arch = env["RELENV_HOST_ARCH"] plat = arch_to_plat[arch] + + # -e skips fetching externals if they already exist. 
cmd = [ str(dirs.source / "PCbuild" / "build.bat"), + "-e", "-p", plat, "--no-tkinter", @@ -330,8 +830,6 @@ def build_python(env: EnvMapping, dirs: Dirs, logfp: IO[str]) -> None: runcmd(cmd, env=env, stderr=logfp, stdout=logfp) log.info("PCbuild finished") - # This is where build.bat puts everything - # TODO: For now we'll only support 64bit if arch == "amd64": build_dir = dirs.source / "PCbuild" / arch else: @@ -339,7 +837,6 @@ def build_python(env: EnvMapping, dirs: Dirs, logfp: IO[str]) -> None: bin_dir = dirs.prefix / "Scripts" bin_dir.mkdir(parents=True, exist_ok=True) - # Move python binaries binaries = [ "py.exe", "pyw.exe", @@ -354,15 +851,12 @@ def build_python(env: EnvMapping, dirs: Dirs, logfp: IO[str]) -> None: for binary in binaries: shutil.move(src=str(build_dir / binary), dst=str(bin_dir / binary)) - # Create DLLs directory (dirs.prefix / "DLLs").mkdir(parents=True, exist_ok=True) - # Move all library files to DLLs directory (*.pyd, *.dll) for file in glob.glob(str(build_dir / "*.pyd")): shutil.move(src=file, dst=str(dirs.prefix / "DLLs")) for file in glob.glob(str(build_dir / "*.dll")): shutil.move(src=file, dst=str(dirs.prefix / "DLLs")) - # Copy include directory shutil.copytree( src=str(dirs.source / "Include"), dst=str(dirs.prefix / "Include"), @@ -370,30 +864,21 @@ def build_python(env: EnvMapping, dirs: Dirs, logfp: IO[str]) -> None: ) if "3.13" not in env["RELENV_PY_MAJOR_VERSION"]: shutil.copy( - src=str(dirs.source / "PC" / "pyconfig.h"), - dst=str(dirs.prefix / "Include"), + src=str(dirs.source / "PC" / "pyconfig.h"), dst=str(dirs.prefix / "Include") ) - # Copy library files shutil.copytree( - src=str(dirs.source / "Lib"), - dst=str(dirs.prefix / "Lib"), - dirs_exist_ok=True, + src=str(dirs.source / "Lib"), dst=str(dirs.prefix / "Lib"), dirs_exist_ok=True ) os.makedirs(str(dirs.prefix / "Lib" / "site-packages"), exist_ok=True) - # Create libs directory (dirs.prefix / "libs").mkdir(parents=True, exist_ok=True) - # Copy lib files shutil.copy( src=str(build_dir / "python3.lib"), dst=str(dirs.prefix / "libs" / "python3.lib"), ) pylib = f"python{ env['RELENV_PY_MAJOR_VERSION'].replace('.', '') }.lib" - shutil.copy( - src=str(build_dir / pylib), - dst=str(dirs.prefix / "libs" / pylib), - ) + shutil.copy(src=str(build_dir / pylib), dst=str(dirs.prefix / "libs" / pylib)) build = builds.add("win32", populate_env=populate_env) @@ -412,43 +897,20 @@ def build_python(env: EnvMapping, dirs: Dirs, logfp: IO[str]) -> None: def finalize(env: EnvMapping, dirs: Dirs, logfp: IO[str]) -> None: """ Finalize sitecustomize, relenv runtime, and pip for Windows. 
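+
+    Installs the relenv runtime into site-packages, bootstraps pip with
+    ensurepip, and then installs wheel and relenv itself into the new prefix.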
- - :param env: The environment dictionary - :type env: dict - :param dirs: The working directories - :type dirs: ``relenv.build.common.Dirs`` - :param logfp: A handle for the log file - :type logfp: file """ - # Lay down site customize sitepackages = dirs.prefix / "Lib" / "site-packages" install_runtime(sitepackages) - - # update ensurepip update_ensurepip(dirs.prefix / "Lib") - # Install pip python = dirs.prefix / "Scripts" / "python.exe" runcmd([str(python), "-m", "ensurepip"], env=env, stderr=logfp, stdout=logfp) def runpip(pkg: Union[str, os.PathLike[str]]) -> None: - # XXX Support cross pip installs on windows env = os.environ.copy() - target = None - cmd = [ - str(python), - "-m", - "pip", - "install", - str(pkg), - ] - if target: - cmd.append("--target={}".format(target)) + cmd = [str(python), "-m", "pip", "install", str(pkg)] runcmd(cmd, env=env, stderr=logfp, stdout=logfp) runpip("wheel") - # This needs to handle running from the root of the git repo and also from - # an installed Relenv if (MODULE_DIR.parent / ".git").exists(): runpip(MODULE_DIR.parent) else: @@ -474,8 +936,4 @@ def runpip(pkg: Union[str, os.PathLike[str]]) -> None: create_archive(fp, dirs.prefix, globs, logfp) -build.add( - "relenv-finalize", - build_func=finalize, - wait_on=["python"], -) +build.add("relenv-finalize", build_func=finalize, wait_on=["python"]) diff --git a/relenv/buildenv.py.rej b/relenv/buildenv.py.rej deleted file mode 100644 index 52953b70..00000000 --- a/relenv/buildenv.py.rej +++ /dev/null @@ -1,62 +0,0 @@ -diff a/relenv/buildenv.py b/relenv/buildenv.py (rejected hunks) -@@ -69,7 +69,6 @@ def buildenv( - raise RelenvException("buildenv is only supported on Linux") - - triplet = get_triplet() -- sysroot = f"{toolchain}/{triplet}/sysroot" - env = { - "RELENV_BUILDENV": "1", - "TOOLCHAIN_PATH": f"{toolchain}", -@@ -77,45 +76,25 @@ def buildenv( - "RELENV_PATH": f"{relenv_path}", - "CC": f"{toolchain}/bin/{triplet}-gcc", - "CXX": f"{toolchain}/bin/{triplet}-g++", -- "CFLAGS": ( -- f"--sysroot={sysroot} " -- f"-fPIC " -- f"-I{relenv_path}/include " -- f"-I{sysroot}/usr/include" -- ), -+ "CFLAGS": f"-I{relenv_path}/include -I{toolchain}/sysroot/usr/include", - "CXXFLAGS": ( -- f"--sysroot={sysroot} " -- f"-fPIC " - f"-I{relenv_path}/include " -- f"-I{sysroot}/usr/include " -- f"-L{relenv_path}/lib -L{sysroot}/lib " -+ f"-I{toolchain}/{triplet}/sysroot/usr/include " -+ f"-L{relenv_path}/lib -L{toolchain}/{triplet}/sysroot/lib " - f"-Wl,-rpath,{relenv_path}/lib" - ), - "CPPFLAGS": ( -- f"--sysroot={sysroot} " -- f"-fPIC " -- f"-I{relenv_path}/include " -- f"-I{sysroot}/usr/include" -+ f"-I{relenv_path}/include " f"-I{toolchain}/{triplet}/sysroot/usr/include" - ), - "CMAKE_CFLAGS": ( -- f"--sysroot={sysroot} " -- f"-fPIC " -- f"-I{relenv_path}/include " -- f"-I{sysroot}/usr/include" -+ f"-I{relenv_path}/include " f"-I{toolchain}/{triplet}/sysroot/usr/include" - ), - "LDFLAGS": ( -- f"--sysroot={sysroot} " -- f"-L{relenv_path}/lib -L{sysroot}/lib " -+ f"-L{relenv_path}/lib -L{toolchain}/{triplet}/sysroot/lib " - f"-Wl,-rpath,{relenv_path}/lib" - ), -- "CRATE_CC_NO_DEFAULTS": "1", -- "OPENSSL_DIR": f"{relenv_path}", -- "OPENSSL_INCLUDE_DIR": f"{relenv_path}/include", -- "OPENSSL_LIB_DIR": f"{relenv_path}/lib", -- "PKG_CONFIG_PATH": f"{relenv_path}/lib/pkgconfig", -- "RUSTFLAGS": f"-L {relenv_path}/lib -C link-arg=-Wl,-rpath,{relenv_path}/lib", - } -- if sys.platform == "darwin": -+ if sys.platform == "dawin": - env["MACOS_DEVELOPMENT_TARGET"] = MACOS_DEVELOPMENT_TARGET - return env - diff --git 
a/relenv/common.py b/relenv/common.py index a017e708..d745a800 100644 --- a/relenv/common.py +++ b/relenv/common.py @@ -599,6 +599,14 @@ def extract_archive( archive_path = pathlib.Path(archive) archive_str = str(archive_path) to_path = pathlib.Path(to_dir) + if archive_str.endswith(".zip"): + import zipfile + + log.debug("Found zip archive") + with zipfile.ZipFile(archive_str, "r") as zip_ref: + zip_ref.extractall(str(to_path)) + return + TarReadMode = Literal["r:gz", "r:xz", "r:bz2", "r"] read_type: TarReadMode = "r" if archive_str.endswith(".tgz"): diff --git a/relenv/python-versions.json b/relenv/python-versions.json index 5b092020..1661fd75 100644 --- a/relenv/python-versions.json +++ b/relenv/python-versions.json @@ -184,32 +184,48 @@ "3.13.11": "1c658e3f04f9ebc0eadd4450cb78ce50ab526597", "3.13.10": "15a47abaf928dbbe73f932ef3ba78070cdfe699d", "3.13.9": "53a9cd799370adad6fe471a2ee45874bbf6ccfc1", - "3.9.25": "36c7257ec30dca042679626d0dff79715acd4efb" + "3.9.25": "36c7257ec30dca042679626d0dff79715acd4efb", + "3.13.12": "7c5b0241cb7d33d4eab78c9fd44967b08220dfe7" }, "dependencies": { - "openssl": { - "3.5.4": { - "url": "https://github.com/openssl/openssl/releases/download/openssl-{version}/openssl-{version}.tar.gz", - "sha256": "", + "perl": { + "5.38.2.2": { + "url": "https://github.com/StrawberryPerl/Perl-Dist-Strawberry/releases/download/SP_53822_64bit/strawberry-perl-{version}-64bit-portable.zip", + "sha256": "ea451686065d6338d7e4d4a04c9af49f17951d15aa4c2e19ab8cb56fa2373440", "platforms": [ - "linux", - "darwin" + "win32" ] }, - "3.6.0": { - "url": "https://github.com/openssl/openssl/releases/download/openssl-{version}/openssl-{version}.tar.gz", - "sha256": "b6a5f44b7eb69e3fa35dbf15524405b44837a481d43d81daddde3ff21fcbb8e9", + "5.42.0.1": { + "url": "https://github.com/StrawberryPerl/Perl-Dist-Strawberry/releases/download/SP_54201_64bit/strawberry-perl-{version}-64bit-portable.zip", + "sha256": "a1cde185656cf307b51670eed69f648b9eff15b5c518cb136e027c628e650b71", "platforms": [ - "linux", - "darwin" + "win32" ] - }, + } + }, + "openssl": { "3.6.1": { "url": "https://github.com/openssl/openssl/releases/download/openssl-{version}/openssl-{version}.tar.gz", "sha256": "b1bfedcd5b289ff22aee87c9d600f515767ebf45f77168cb6d64f231f518a82e", "platforms": [ "linux", - "darwin" + "darwin", + "win32" + ] + }, + "3.0.19": { + "url": "https://github.com/python/cpython-bin-deps/archive/refs/tags/openssl-bin-{version}.tar.gz", + "sha256": "ac4e19205c0c5155274f4d7a4d59c1baff1e3c7114f475849c8bb0e70796fce4", + "platforms": [ + "win32" + ] + }, + "3.5.5": { + "url": "https://github.com/python/cpython-bin-deps/archive/refs/tags/openssl-bin-{version}.tar.gz", + "sha256": "b7ce0b6f82d20187cee93fad393d1cb19582272143a50b600d5c25ec486950d9", + "platforms": [ + "win32" ] } }, @@ -246,14 +262,6 @@ } }, "xz": { - "5.8.1": { - "url": "http://tukaani.org/xz/xz-{version}.tar.gz", - "sha256": "507825b599356c10dca1cd720c9d0d0c9d5400b9de300af00e4d1ea150795543", - "platforms": [ - "linux", - "darwin" - ] - }, "5.8.2": { "url": "http://tukaani.org/xz/xz-{version}.tar.gz", "sha256": "ce09c50a5962786b83e5da389c90dd2c15ecd0980a258dd01f70f9e7ce58a8f1", @@ -271,14 +279,22 @@ "platforms": [ "linux" ] + }, + "3.4.4": { + "url": "https://github.com/python/cpython-bin-deps/archive/refs/tags/libffi-{version}.tar.gz", + "sha256": "eb51312acdeed06451c53ec29a40f9e9141e01e1e777bd91c154ca14ddd5e055", + "platforms": [ + "win32" + ] } }, "zlib": { "1.3.1": { - "url": "https://zlib.net/fossils/zlib-{version}.tar.gz", + "url": 
"https://github.com/madler/zlib/releases/download/v{version}/zlib-{version}.tar.gz", "sha256": "9a93b2b7dfdac77ceba5a558a580e74667dd6fede4585b91eefb60f03b72df23", "platforms": [ - "linux" + "linux", + "win32" ] } }, @@ -333,9 +349,9 @@ } }, "krb5": { - "1.22": { - "url": "https://kerberos.org/dist/krb5/{version}/krb5-{version}.tar.gz", - "sha256": "652be617b4647f3c5dcac21547d47c7097101aad4e306f1778fb48e17b220ba3", + "1.22.2": { + "url": "https://kerberos.org/dist/krb5/1.22/krb5-{version}.tar.gz", + "sha256": "3243ffbc8ea4d4ac22ddc7dd2a1dc54c57874c40648b60ff97009763554eaf13", "platforms": [ "linux" ] @@ -347,7 +363,8 @@ "sha256": "ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269", "platforms": [ "linux", - "darwin" + "darwin", + "win32" ] } }, @@ -369,6 +386,24 @@ ] } }, + "mpdecimal": { + "4.0.1": { + "url": "https://www.bytereef.org/software/mpdecimal/releases/mpdecimal-{version}.tar.gz", + "sha256": "96d33abb4bb0070c7be0fed4246cd38416188325f820468214471938545b1ac8", + "platforms": [ + "win32" + ] + } + }, + "nasm": { + "3.01": { + "url": "https://www.nasm.us/pub/nasm/releasebuilds/{version}/win64/nasm-{version}-win64.zip", + "sha256": "e0ba5157007abc7b1a65118a96657a961ddf55f7e3f632ee035366dfce039ca4", + "platforms": [ + "win32" + ] + } + }, "expat": { "2.7.3": { "url": "https://github.com/libexpat/libexpat/releases/download/R_2_7_3/expat-2.7.3.tar.xz", diff --git a/relenv/pyversions.py b/relenv/pyversions.py index 2864f35e..37748e04 100644 --- a/relenv/pyversions.py +++ b/relenv/pyversions.py @@ -383,9 +383,22 @@ def detect_krb5_versions() -> list[str]: # krb5 versions are like 1.22/ pattern = r"(\d+\.\d+)/" matches = re.findall(pattern, content) - return sorted( + majors = sorted( set(matches), key=lambda v: [int(x) for x in v.split(".")], reverse=True ) + if not majors: + return [] + + # Check the latest major for micro versions + latest_major = majors[0] + url = f"https://kerberos.org/dist/krb5/{latest_major}/" + content = fetch_url_content(url) + pattern = r"krb5-(\d+\.\d+(\.\d+)?)\.tar\.gz" + matches = re.findall(pattern, content) + versions = [m[0] for m in matches] + return sorted( + set(versions), key=lambda v: [int(x) for x in v.split(".")], reverse=True + ) def detect_uuid_versions() -> list[str]: @@ -424,6 +437,63 @@ def detect_expat_versions() -> list[str]: ) +def detect_cpython_bin_deps_versions(name: str) -> list[str]: + """Detect available binary dependency versions from python/cpython-bin-deps.""" + url = "https://github.com/python/cpython-bin-deps/tags" + content = fetch_url_content(url) + # Tags are like openssl-bin-3.0.15 or libffi-3.4.4 + pattern = rf"{name}-(\d+\.\d+(\.\d+)*)\"" + matches = re.findall(pattern, content) + versions = [m[0] for m in matches] + return sorted( + set(versions), key=lambda v: [int(x) for x in v.split(".")], reverse=True + ) + + +def detect_perl_versions() -> list[str]: + """Detect available Strawberry Perl versions from GitHub releases.""" + url = "https://github.com/StrawberryPerl/Perl-Dist-Strawberry/tags" + content = fetch_url_content(url) + # Find tags like SP_53822_64bit + pattern = r'SP_(\d+)_64bit"' + matches = re.findall(pattern, content) + # Convert 53822 to 5.38.2.2 + versions = [] + for m in matches: + if len(m) >= 5: + major = m[0] + minor = int(m[1:3]) + patch = int(m[3:4]) + subpatch = int(m[4:]) + versions.append(f"{major}.{minor}.{patch}.{subpatch}") + return sorted( + set(versions), key=lambda v: [int(x) for x in v.split(".")], reverse=True + ) + + +def detect_mpdecimal_versions() -> list[str]: + """Detect 
available mpdecimal versions from bytereef.org.""" + url = "https://www.bytereef.org/mpdecimal/download.html" + content = fetch_url_content(url) + pattern = r"mpdecimal-(\d+\.\d+\.\d+)\.tar\.gz" + matches = re.findall(pattern, content) + return sorted( + set(matches), key=lambda v: [int(x) for x in v.split(".")], reverse=True + ) + + +def detect_nasm_versions() -> list[str]: + """Detect available nasm versions from nasm.us.""" + url = "https://www.nasm.us/pub/nasm/releasebuilds/" + content = fetch_url_content(url) + pattern = r'href="(\d+\.\d+(\.\d+)?)/"' + matches = re.findall(pattern, content) + versions = [m[0] for m in matches] + return sorted( + set(versions), key=lambda v: [int(x) for x in v.split(".")], reverse=True + ) + + def update_dependency_versions( path: pathlib.Path, deps_to_update: list[str] | None = None ) -> None: @@ -455,7 +525,7 @@ def update_dependency_versions( if deps_to_update is None: # By default, update commonly-changed dependencies # Full list: openssl, sqlite, xz, libffi, zlib, bzip2, ncurses, - # readline, gdbm, libxcrypt, krb5, uuid, tirpc, expat + # readline, gdbm, libxcrypt, krb5, uuid, tirpc, expat, mpdecimal, nasm deps_to_update = [ "openssl", "sqlite", @@ -471,8 +541,43 @@ def update_dependency_versions( "uuid", "tirpc", "expat", + "mpdecimal", + "nasm", + "perl", ] + # Update perl + if "perl" in deps_to_update: + print("Checking perl versions...") + perl_versions = detect_perl_versions() + if perl_versions: + latest = perl_versions[0] + print(f"Latest perl: {latest}") + if "perl" not in dependencies: + dependencies["perl"] = {} + if latest not in dependencies["perl"]: + ver_tag = latest.replace(".", "") + url = ( + f"https://github.com/StrawberryPerl/Perl-Dist-Strawberry/releases/" + f"download/SP_{ver_tag}_64bit/strawberry-perl-{latest}-64bit-portable.zip" + ) + print(f"Downloading {url}...") + try: + download_path = download_url(url, cwd) + checksum = sha256_digest(download_path) + print(f"SHA-256: {checksum}") + dependencies["perl"][latest] = { + "url": ( + f"https://github.com/StrawberryPerl/Perl-Dist-Strawberry/releases/" + f"download/SP_{ver_tag}_64bit/strawberry-perl-{{version}}-64bit-portable.zip" + ), + "sha256": checksum, + "platforms": ["win32"], + } + os.remove(download_path) + except Exception as e: + print(f"Failed to download perl: {e}") + # Update OpenSSL if "openssl" in deps_to_update: print("Checking OpenSSL versions...") @@ -482,6 +587,8 @@ def update_dependency_versions( print(f"Latest OpenSSL: {latest}") if "openssl" not in dependencies: dependencies["openssl"] = {} + + platforms = ["linux", "darwin", "win32"] if latest not in dependencies["openssl"]: url = f"https://github.com/openssl/openssl/releases/download/openssl-{latest}/openssl-{latest}.tar.gz" print(f"Downloading {url}...") @@ -495,10 +602,30 @@ def update_dependency_versions( dependencies["openssl"][latest] = { "url": url_template, "sha256": checksum, - "platforms": ["linux", "darwin"], + "platforms": platforms, } # Clean up download os.remove(download_path) + else: + dependencies["openssl"][latest]["platforms"] = platforms + + # Check for Windows-specific OpenSSL from cpython-bin-deps + win_openssl_versions = detect_cpython_bin_deps_versions("openssl-bin") + if win_openssl_versions: + latest = win_openssl_versions[0] + print(f"Latest Windows OpenSSL: {latest}") + if latest not in dependencies["openssl"]: + url = f"https://github.com/python/cpython-bin-deps/archive/refs/tags/openssl-bin-{latest}.tar.gz" + print(f"Downloading {url}...") + download_path = download_url(url, 
cwd) + checksum = sha256_digest(download_path) + print(f"SHA-256: {checksum}") + dependencies["openssl"][latest] = { + "url": "https://github.com/python/cpython-bin-deps/archive/refs/tags/openssl-bin-{version}.tar.gz", + "sha256": checksum, + "platforms": ["win32"], + } + os.remove(download_path) # Update SQLite if "sqlite" in deps_to_update: @@ -577,12 +704,33 @@ def update_dependency_versions( dependencies["libffi"][latest] = { "url": "https://github.com/libffi/libffi/releases/download/v{version}/libffi-{version}.tar.gz", "sha256": checksum, - "platforms": ["linux"], + "platforms": ["linux", "win32"], } os.remove(download_path) except Exception as e: print(f"Failed to download libffi: {e}") + # Check for Windows-specific libffi from cpython-bin-deps + win_libffi_versions = detect_cpython_bin_deps_versions("libffi") + if win_libffi_versions: + latest = win_libffi_versions[0] + print(f"Latest Windows libffi: {latest}") + if latest not in dependencies["libffi"]: + url = f"https://github.com/python/cpython-bin-deps/archive/refs/tags/libffi-{latest}.tar.gz" + print(f"Downloading {url}...") + try: + download_path = download_url(url, cwd) + checksum = sha256_digest(download_path) + print(f"SHA-256: {checksum}") + dependencies["libffi"][latest] = { + "url": "https://github.com/python/cpython-bin-deps/archive/refs/tags/libffi-{version}.tar.gz", + "sha256": checksum, + "platforms": ["win32"], + } + os.remove(download_path) + except Exception as e: + print(f"Failed to download libffi (win32): {e}") + # Update zlib if "zlib" in deps_to_update: print("Checking zlib versions...") @@ -593,16 +741,16 @@ def update_dependency_versions( if "zlib" not in dependencies: dependencies["zlib"] = {} if latest not in dependencies["zlib"]: - url = f"https://zlib.net/fossils/zlib-{latest}.tar.gz" + url = f"https://github.com/madler/zlib/releases/download/v{latest}/zlib-{latest}.tar.gz" print(f"Downloading {url}...") try: download_path = download_url(url, cwd) checksum = sha256_digest(download_path) print(f"SHA-256: {checksum}") dependencies["zlib"][latest] = { - "url": "https://zlib.net/fossils/zlib-{version}.tar.gz", + "url": "https://github.com/madler/zlib/releases/download/v{version}/zlib-{version}.tar.gz", "sha256": checksum, - "platforms": ["linux"], + "platforms": ["linux", "win32"], } os.remove(download_path) except Exception as e: @@ -721,14 +869,17 @@ def update_dependency_versions( if "krb5" not in dependencies: dependencies["krb5"] = {} if latest not in dependencies["krb5"]: - url = f"https://kerberos.org/dist/krb5/{latest}/krb5-{latest}.tar.gz" + major_minor = ".".join(latest.split(".")[:2]) + url = ( + f"https://kerberos.org/dist/krb5/{major_minor}/krb5-{latest}.tar.gz" + ) print(f"Downloading {url}...") try: download_path = download_url(url, cwd) checksum = sha256_digest(download_path) print(f"SHA-256: {checksum}") dependencies["krb5"][latest] = { - "url": "https://kerberos.org/dist/krb5/{version}/krb5-{version}.tar.gz", + "url": f"https://kerberos.org/dist/krb5/{major_minor}/krb5-{{version}}.tar.gz", "sha256": checksum, "platforms": ["linux"], } @@ -755,7 +906,7 @@ def update_dependency_versions( dependencies["bzip2"][latest] = { "url": "https://sourceware.org/pub/bzip2/bzip2-{version}.tar.gz", "sha256": checksum, - "platforms": ["linux", "darwin"], + "platforms": ["linux", "darwin", "win32"], } os.remove(download_path) except Exception as e: @@ -843,6 +994,56 @@ def update_dependency_versions( except Exception as e: print(f"Failed to download expat: {e}") + # Update mpdecimal + if "mpdecimal" in 
deps_to_update: + print("Checking mpdecimal versions...") + mpdecimal_versions = detect_mpdecimal_versions() + if mpdecimal_versions: + latest = mpdecimal_versions[0] + print(f"Latest mpdecimal: {latest}") + if "mpdecimal" not in dependencies: + dependencies["mpdecimal"] = {} + if latest not in dependencies["mpdecimal"]: + url = f"https://www.bytereef.org/software/mpdecimal/releases/mpdecimal-{latest}.tar.gz" + print(f"Downloading {url}...") + try: + download_path = download_url(url, cwd) + checksum = sha256_digest(download_path) + print(f"SHA-256: {checksum}") + dependencies["mpdecimal"][latest] = { + "url": "https://www.bytereef.org/software/mpdecimal/releases/mpdecimal-{version}.tar.gz", + "sha256": checksum, + "platforms": ["win32"], + } + os.remove(download_path) + except Exception as e: + print(f"Failed to download mpdecimal: {e}") + + # Update nasm + if "nasm" in deps_to_update: + print("Checking nasm versions...") + nasm_versions = detect_nasm_versions() + if nasm_versions: + latest = nasm_versions[0] + print(f"Latest nasm: {latest}") + if "nasm" not in dependencies: + dependencies["nasm"] = {} + if latest not in dependencies["nasm"]: + url = f"https://www.nasm.us/pub/nasm/releasebuilds/{latest}/win64/nasm-{latest}-win64.zip" + print(f"Downloading {url}...") + try: + download_path = download_url(url, cwd) + checksum = sha256_digest(download_path) + print(f"SHA-256: {checksum}") + dependencies["nasm"][latest] = { + "url": "https://www.nasm.us/pub/nasm/releasebuilds/{version}/win64/nasm-{version}-win64.zip", + "sha256": checksum, + "platforms": ["win32"], + } + os.remove(download_path) + except Exception as e: + print(f"Failed to download nasm: {e}") + # Write updated data all_data = {"python": pydata, "dependencies": dependencies} path.write_text(json.dumps(all_data, indent=1)) @@ -1101,6 +1302,9 @@ def main(args: argparse.Namespace) -> None: ("uuid", "uuid", detect_uuid_versions), ("tirpc", "tirpc", detect_tirpc_versions), ("expat", "expat", detect_expat_versions), + ("mpdecimal", "MPDecimal", detect_mpdecimal_versions), + ("nasm", "NASM", detect_nasm_versions), + ("perl", "Perl", detect_perl_versions), ] for dep_key, dep_name, detect_func in checks:
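
A minimal sketch of the tag decoding that detect_perl_versions performs above;
the helper name decode_sp_tag is illustrative, not part of the patch, and it
assumes Strawberry Perl tags always encode one major digit, two minor digits,
one patch digit, and a trailing subpatch:

    def decode_sp_tag(tag: str) -> str:
        """Decode a Strawberry Perl release tag such as 'SP_53822_64bit'."""
        digits = tag.split("_")[1]  # "53822"
        major = digits[0]           # "5"
        minor = int(digits[1:3])    # 38
        patch = int(digits[3:4])    # 2
        subpatch = int(digits[4:])  # 2
        return f"{major}.{minor}.{patch}.{subpatch}"

    assert decode_sp_tag("SP_53822_64bit") == "5.38.2.2"
    assert decode_sp_tag("SP_54201_64bit") == "5.42.0.1"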