From 2a48c5bcc9ca4628e8b7dd3ae4c6f012b8e6bf3d Mon Sep 17 00:00:00 2001 From: Bryan Weber Date: Tue, 23 Jul 2024 12:58:57 -0400 Subject: [PATCH] Add HDF5 support for building wheels (#5) * Remove boost from sdist build, not needed * Bump cibuildwheel and some cleanup * Bump boost to 1.85.0 * Config updates * Add hdf5 for macos and turn off fail fast * Move some more config to pyproject.toml * Update some build steps from h5py * Star the python config * Use boost install action on Linux * Adapt windows builder from h5py * Update macos build after h5py * Checkout the repo * D'oh * Hopefully fixes for boost * Fix boost locations * Fix paths to script locations * Boost is built-in to the Linux cibuildwheel image now * Set MACOSX_DEPLOYMENT_TARGET for hdf5 We need to set MACOSX_DEPLOYMENT_TARGET when building HDF5, explained here https://github.com/h5py/h5py/pull/2444/files#r1679541011 * Fix boost arch for macos * Cache HDF5 after building * Try setting up HDF5 libs for macOS * Fix boost paths on Windows * More Boost fixes * Move building hdf5 into macos script * Set MACOSX_DEPLOYMENT_TARGET to support C++17 * Fix some bash syntax errors * Patch libaec * Debugging * Fix dots and dashes in hdf5 version * Fix building HDF5 on macOS * Add container engine config to environment * Bump MACOSX_DEPLOYMENT_TARGET Latest error message says 10.15 * Export variables on macos to the environment * Fix Windows test paths * Simplify to a single job definition * Rename the action file * Add arch to boost installer * Need to set boost arch for macos * Fix download action * Cache pip dependencies * Fix build selectors on macOS * Action garbage * Quoting problems * Windows updates * Typo! * Fix a macos arm misconfig * Set HDF5 and ZLIB dirs so delvewheel config doesn't need to know the installation structure * Clean up unused script and add README * Try to clean up env var setting * Set CMAKE parallel level on Windows The other platforms should use Ninja which is automatically parallel * Formatting and updates * Address review comments --- .../actions/download-cantera-test/action.yml | 43 +++ .github/workflows/python-package.yml | 272 ++++++++---------- README.md | 7 + cibw_before_all_macos.sh | 146 ++++++++++ cibw_before_all_windows.sh | 68 +++++ get_hdf5_win.py | 217 ++++++++++++++ libaec_cmakelists.patch | 11 + 7 files changed, 615 insertions(+), 149 deletions(-) create mode 100644 .github/actions/download-cantera-test/action.yml create mode 100644 README.md create mode 100644 cibw_before_all_macos.sh create mode 100644 cibw_before_all_windows.sh create mode 100644 get_hdf5_win.py create mode 100644 libaec_cmakelists.patch diff --git a/.github/actions/download-cantera-test/action.yml b/.github/actions/download-cantera-test/action.yml new file mode 100644 index 0000000..eb9ffae --- /dev/null +++ b/.github/actions/download-cantera-test/action.yml @@ -0,0 +1,43 @@ +name: "Download Cantera Test Files" +description: "Download and cache Cantera's Python test suite" +inputs: + incoming-sha: + description: The hash of the commit that should be downloaded + required: true +outputs: + test-root: + description: The root folder where the tests are located + value: ${{ steps.set-output.outputs.DESTINATION_PATH }} + +runs: + using: "composite" + steps: + - name: Sanitize the destination path + run: | + $DESTINATION_PATH = "${{ runner.temp }}" -replace "\\", "/" + echo "DESTINATION_PATH=$DESTINATION_PATH" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + if: runner.os == 'Windows' + shell: pwsh + - name: Sanitize 
the destination path + run: echo "DESTINATION_PATH=${{ runner.temp }}" >> $GITHUB_ENV + shell: bash + if: runner.os != 'Windows' + - name: Set the outputs + id: set-output + run: echo "DESTINATION_PATH=${{ env.DESTINATION_PATH }}" >> $GITHUB_OUTPUT + shell: bash + - name: Download and unpack the tarball + if: steps.restore-cache.outputs.cache-hit != 'true' + run: | + curl -fsSL "https://github.com/cantera/cantera/archive/${{ inputs.incoming-sha }}.tar.gz" -o cantera.tar.gz + tar -xzf cantera.tar.gz --strip-components=1 "cantera-${{ inputs.incoming-sha }}/test" + rm cantera.tar.gz + shell: bash + working-directory: ${{ steps.set-output.outputs.DESTINATION_PATH }} + - name: Save the test file cache + uses: actions/cache/save@v4 + if: always() && steps.restore-cache.outputs.cache-hit != true + id: save-cache + with: + path: ${{ steps.set-output.outputs.DESTINATION_PATH }}/test + key: ${{ steps.restore-cache.outputs.cache-primary-key }} diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index cdb4317..cf38fe5 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -19,9 +19,6 @@ concurrency: cancel-in-progress: true env: - CIBW_BUILD_FRONTEND: build - CIBW_TEST_EXTRAS: pandas,units,graphviz - CIBW_TEST_REQUIRES: pytest ACTION_URL: "https://github.com/Cantera/pypi-packages/actions/runs/${{ github.run_id }}" jobs: @@ -106,10 +103,6 @@ jobs: outputs: job-status: ${{ job.status }} steps: - - name: Install dependencies - run: | - sudo apt-get update - sudo apt-get install libboost-dev - uses: actions/checkout@v4 name: Checkout the repository with: @@ -120,15 +113,13 @@ jobs: uses: actions/setup-python@v5 with: python-version: "3.12" + cache: 'pip' + cache-dependency-path: interfaces/python_sdist/pyproject.toml.in - name: Install dependencies - run: python3 -m pip install -U pip scons build + run: python3 -m pip install scons build - name: Build the sdist run: | - python3 `which scons` sdist f90_interface=n python_package='none' \ - system_blas_lapack=n system_sundials=n system_eigen=n system_fmt=n \ - system_yamlcpp=n googletest=none env_vars='CYTHON_FORCE_REGEN' - env: - CYTHON_FORCE_REGEN: "1" + python3 `which scons` sdist - name: Archive the built sdist uses: actions/upload-artifact@v4 with: @@ -154,164 +145,153 @@ jobs: echo -e '\n\n' >> $GITHUB_STEP_SUMMARY echo ----- End of Metadata ----- - linux-wheel: - name: Build ${{ matrix.libc }}linux_${{ matrix.arch }} for py${{ matrix.py }} - runs-on: ubuntu-22.04 + build-wheels: + name: Build ${{ matrix.os }} ${{ matrix.arch }} for py${{ matrix.python || '-all' }} + runs-on: ${{ matrix.os }} needs: ["sdist", "post-pending-status"] outputs: job-status: ${{ job.status }} strategy: matrix: - py: ["38", "39", "310", "311", "312"] - arch: ["x86_64", "aarch64"] - libc: ["many"] - fail-fast: true - env: - BOOST_INCLUDE: include - BOOST_URL: https://boostorg.jfrog.io/artifactory/main/release/1.78.0/source/boost_1_78_0.7z + # Wheel builds are fast except for aarch64, so split that into multiple jobs, + # one for each Python version + os: [ubuntu-latest] + arch: [aarch64] + python: ["3.8", "3.9", "3.10", "3.11", "3.12"] + include: + - os: ubuntu-latest + arch: x86_64 + - os: windows-2022 + arch: AMD64 + boost-arch: x86 + boost-toolset: msvc + boost-platform-version: 2022 + boost-version: "1.85.0" + - os: macos-14 + arch: arm64 + boost-arch: aarch64 + boost-toolset: clang + # Since we only use the headers, we can use the platform version for this + # macos version + 
boost-platform-version: "14" + boost-version: "1.85.0" + - os: macos-13 + arch: x86_64 + boost-arch: x86 + boost-toolset: clang + # Since we only use the headers, we can use the platform version for this + # macos version + boost-platform-version: "13" + boost-version: "1.85.0" + fail-fast: false steps: + - name: Checkout the repository + uses: actions/checkout@v4 - name: Download pre-built sdist uses: actions/download-artifact@v4 with: name: cibw-sdist - name: Extract the sdist tarball run: tar -xvf *.tar.gz --strip-components=1 - - name: Restore Boost cache - uses: actions/cache@v4 - id: cache-boost + shell: bash + - name: Download test files + id: download-test-files + uses: ./.github/actions/download-cantera-test with: - path: ${{ env.BOOST_INCLUDE }}/boost - key: boost-${{env.BOOST_URL}} - - name: Install Boost Headers - if: steps.cache-boost.outputs.cache-hit != 'true' - run: | - mkdir -p $BOOST_INCLUDE - curl --progress-bar --location --output $BOOST_INCLUDE/download.7z $BOOST_URL - 7z -o$BOOST_INCLUDE x $BOOST_INCLUDE/download.7z -y -bd boost_1_78_0/boost - mv $BOOST_INCLUDE/boost_1_78_0/boost $BOOST_INCLUDE/boost - rm $BOOST_INCLUDE/download.7z - rm -r $BOOST_INCLUDE/boost_1_78_0 + incoming-sha: ${{ needs.post-pending-status.outputs.incoming-sha }} + + # Linux steps - name: Set up QEMU uses: docker/setup-qemu-action@v3 with: platforms: all - - name: Build wheels - uses: pypa/cibuildwheel@v2.19.1 - env: - CIBW_ENVIRONMENT: BOOST_INCLUDE=${{ env.BOOST_INCLUDE }} CT_SKIP_SLOW=1 CYTHON_FORCE_REGEN=${{ matrix.py == '38' && '1' || '0' }} - CIBW_BUILD: cp${{ matrix.py }}-${{ matrix.libc }}linux* - CIBW_ARCHS: ${{ matrix.arch }} - # cibuildwheel on Linux uses a Docker container to run the build, so - # runner.temp is not available. cibuildwheel also uses the /tmp folder, so - # we should be pretty safe to also use that. - CIBW_TEST_COMMAND: pytest -vv --durations=100 /tmp/test/python - CIBW_BEFORE_TEST: | - curl -sL "https://github.com/cantera/cantera/archive/${{ needs.post-pending-status.outputs.incoming-sha }}.tar.gz" -o /tmp/cantera.tar.gz \ - && tar -xzf /tmp/cantera.tar.gz --strip-components=1 -C /tmp "cantera-${{ needs.post-pending-status.outputs.incoming-sha }}/test" - - # NumPy is generally not available for these platforms so testing takes a - # while. This just skips the tests on these - # combinations, the wheels are still built and uploaded. 
- CIBW_TEST_SKIP: "*-manylinux_{i686,ppc64le,s390x} *musl*" + if: matrix.arch != 'x86_64' && runner.os == 'Linux' + - name: Set up CIBW environment + run: | + PYTHON="${{ matrix.python }}" + if [[ $PYTHON == "" ]]; then PYTHON="*"; fi + CIBW_BUILD="cp${PYTHON//./}-*_${{ matrix.arch }}" + echo "CIBW_BUILD=${CIBW_BUILD}" | tee -a $GITHUB_ENV + if: runner.os == 'Linux' - - name: Archive the built wheels - uses: actions/upload-artifact@v4 + - name: Install boost + # Our custom manylinux images already have Boost installed + if: runner.os != 'Linux' + uses: MarkusJx/install-boost@v2.4.5 + id: install-boost with: - path: ./wheelhouse/*.whl - name: cibw-wheels-linux-${{ strategy.job-index }} + # REQUIRED: Specify the required boost version + # A list of supported versions can be found here: + # https://github.com/MarkusJx/prebuilt-boost/blob/main/versions-manifest.json + boost_version: ${{ matrix.boost-version }} + # OPTIONAL: Specify a custon install location + boost_install_dir: ${{ runner.temp }} + toolset: ${{ matrix.boost-toolset }} + platform_version: ${{ matrix.boost-platform-version }} + arch: ${{ matrix.boost-arch }} - windows-wheel: - name: Build Windows Wheels for py${{ matrix.py }} - runs-on: windows-2019 - needs: ["sdist", "post-pending-status"] - outputs: - job-status: ${{ job.status }} - strategy: - matrix: - py: ["38", "39", "310", "311", "312"] - fail-fast: true - env: - BOOST_ROOT: ${{ github.workspace }}/3rdparty/boost - BOOST_URL: https://boostorg.jfrog.io/artifactory/main/release/1.78.0/source/boost_1_78_0.7z - steps: - - name: Download pre-built sdist - uses: actions/download-artifact@v4 - with: - name: cibw-sdist - - name: Extract the sdist tarball - run: tar -xvf *.tar.gz --strip-components=1 - shell: bash - - name: Restore Boost cache + - name: Cache built libraries + id: cache-built-libraries + # Our custom manylinux images already have hdf5 installed + if: runner.os != 'Linux' uses: actions/cache@v4 - id: cache-boost with: - path: ${{env.BOOST_ROOT}} - key: boost-${{env.BOOST_URL}} - - name: Install Boost Headers - if: steps.cache-boost.outputs.cache-hit != 'true' - run: | - BOOST_ROOT=$(echo $BOOST_ROOT | sed 's/\\/\//g') - mkdir -p $BOOST_ROOT - curl --progress-bar --location --output $BOOST_ROOT/download.7z $BOOST_URL - 7z -o$BOOST_ROOT x $BOOST_ROOT/download.7z -y -bd boost_1_78_0/boost - mv $BOOST_ROOT/boost_1_78_0/boost $BOOST_ROOT/boost - rm $BOOST_ROOT/download.7z - shell: bash - - name: Build wheels - uses: pypa/cibuildwheel@v2.19.1 - env: - CIBW_ENVIRONMENT: BOOST_INCLUDE=${BOOST_ROOT} CT_SKIP_SLOW=1 CYTHON_FORCE_REGEN=${{ matrix.py == '38' && '1' || '0' }} - CIBW_ARCHS: "AMD64" - CIBW_BUILD: cp${{ matrix.py }}-* - CIBW_TEST_COMMAND: pytest -vv --durations=100 ${{ runner.temp }}/test/python - CIBW_BEFORE_TEST: | - curl -sL "https://github.com/cantera/cantera/archive/${{ needs.post-pending-status.outputs.incoming-sha }}.tar.gz" -o ${{ runner.temp }}/cantera.tar.gz && tar -xzf ${{ runner.temp }}/cantera.tar.gz --strip-components=1 -C ${{ runner.temp }} "cantera-${{ needs.post-pending-status.outputs.incoming-sha }}/test" - - name: Archive the built wheels - uses: actions/upload-artifact@v4 + key: ${{ matrix.os }}-${{ matrix.arch }}-0 + path: ${{ runner.temp }}/cache + + # Windows-only steps + - name: Set Up Nuget + uses: nuget/setup-nuget@v2 + if: runner.os == 'Windows' + - uses: actions/setup-python@v5 with: - path: ./wheelhouse/*.whl - name: cibw-wheels-windows-${{ strategy.job-index }} + python-version: 3.12 + if: runner.os == 'Windows' + - run: bash 
./cibw_before_all_windows.sh "${{ runner.temp }}" + if: runner.os == 'Windows' + - name: Set up CIBW environment + # On Windows, Boost_ROOT needs to have \ replaced by / because that's what + # cibuildwheel says. CANTERA_TEST_DIR doesn't need the replacement because + # it will be substituted in a cmd or pwsh session. + run: | + $BOOST_ROOT = "${{ steps.install-boost.outputs.BOOST_ROOT }}" -replace "\\", "/" + echo "Boost_ROOT=$BOOST_ROOT" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + echo "CANTERA_TEST_DIR=${{ steps.download-test-files.outputs.test-root }}" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + echo "CIBW_BUILD=cp*-*" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + shell: pwsh + if: runner.os == 'Windows' - macos-wheel: - name: Build ${{ matrix.macos-version }} Wheels for py${{ matrix.py }} - runs-on: ${{ matrix.macos-version }} - needs: ["sdist", "post-pending-status"] - outputs: - job-status: ${{ job.status }} - strategy: - matrix: - macos-version: [ "macos-13", "macos-14" ] - py: [ "39", "310", "311", "312" ] - deployment_target: [ "11.0" ] - include: - - py: "38" - deployment_target: "11.0" - macos-version: "macos-13" - fail-fast: true - env: - MACOSX_DEPLOYMENT_TARGET: ${{ matrix.deployment_target }} - steps: - - name: Download pre-built sdist - uses: actions/download-artifact@v4 + # macOS-only steps + - name: Build required libraries + run: bash ./cibw_before_all_macos.sh "${{ runner.temp }}" + if: runner.os == 'macOS' + # Force installation to resolve Python 3.8 bug (https://github.com/pypa/cibuildwheel/pull/1871#issuecomment-2161613619) + - name: Hack for 3.8 bug + uses: actions/setup-python@v5 with: - name: cibw-sdist - - name: Extract the sdist tarball - run: tar -xvf *.tar.gz --strip-components=1 - - name: Install Brew dependencies - run: brew install boost + python-version: 3.8 + if: runner.os == 'macOS' && matrix.arch == 'arm64' + + - name: Set up CIBW environment + run: | + echo "Boost_ROOT=${{ steps.install-boost.outputs.BOOST_ROOT }}" >> $GITHUB_ENV + echo "CANTERA_TEST_DIR=${{ steps.download-test-files.outputs.test-root }}" >> $GITHUB_ENV + echo "CIBW_BUILD=cp*-*" >> $GITHUB_ENV + if: runner.os == 'macOS' + - name: Build wheels - uses: pypa/cibuildwheel@v2.19.1 + uses: pypa/cibuildwheel@v2.19.2 env: - CIBW_ENVIRONMENT: BOOST_INCLUDE="$(brew --prefix)/include" RUNNER_TEMP=${{ runner.temp }} CT_SKIP_SLOW=1 CYTHON_FORCE_REGEN=${{ matrix.py == '38' && '1' || '0' }} - CIBW_BUILD: cp${{ matrix.py }}-* - CIBW_TEST_COMMAND: pytest -vv --durations=100 ${RUNNER_TEMP}/test/python - CIBW_BEFORE_TEST: | - curl -sL "https://github.com/cantera/cantera/archive/${{ needs.post-pending-status.outputs.incoming-sha }}.tar.gz" -o ${{ runner.temp }}/cantera.tar.gz && tar -xzf ${{ runner.temp }}/cantera.tar.gz --strip-components=1 -C ${{ runner.temp }} "cantera-${{ needs.post-pending-status.outputs.incoming-sha }}/test" + CIBW_ENVIRONMENT_LINUX: CT_SKIP_SLOW=1 CANTERA_TEST_DIR=/host/${{ steps.download-test-files.outputs.test-root }} + CIBW_ENVIRONMENT_WINDOWS: CT_SKIP_SLOW=1 CMAKE_BUILD_PARALLEL_LEVEL=4 + CIBW_ENVIRONMNET_MACOS: CT_SKIP_SLOW=1 + CIBW_BUILD: ${{ env.CIBW_BUILD }} + CIBW_ARCHS: ${{ matrix.arch }} - name: Archive the built wheels uses: actions/upload-artifact@v4 with: path: ./wheelhouse/*.whl - name: cibw-wheels-macos-${{ strategy.job-index }} + name: cibw-wheels-${{ runner.os }}-${{ strategy.job-index }} publish-files-to-pypi: name: Publish distribution files to PyPI @@ -320,9 +300,7 @@ jobs: job-status: ${{ job.status }} 
needs: - "sdist" - - "linux-wheel" - - "windows-wheel" - - "macos-wheel" + - "build-wheels" if: github.event.inputs.upload == 'true' permissions: id-token: write @@ -343,9 +321,7 @@ jobs: needs: - "post-pending-status" - "sdist" - - "linux-wheel" - - "windows-wheel" - - "macos-wheel" + - "build-wheels" - "publish-files-to-pypi" if: always() steps: @@ -355,9 +331,7 @@ jobs: import os statuses = { "sdist": "${{needs.sdist.outputs.job-status}}", - "linux": "${{needs.linux-wheel.outputs.job-status}}", - "windows": "${{needs.windows-wheel.outputs.job-status}}", - "macos": "${{needs.macos-wheel.outputs.job-status}}", + "wheels": "${{needs.build-wheels.outputs.job-status}}", "publish": "${{needs.publish-files-to-pypi.outputs.job-status}}", } # This is a deliberate comparison to the empty string. diff --git a/README.md b/README.md new file mode 100644 index 0000000..95f4acc --- /dev/null +++ b/README.md @@ -0,0 +1,7 @@ +# Cantera PyPI Packages + +This repo contains setup to build and publish packages to PyPI. It uses [`cibuildwheel`](https://cibuildwheel.pypa.io/en/stable/) to manage the builds. + +Docker images for the manylinux builds are hosted at . + +For macOS and Windows, the scripts in this repo will build and install the required HDF5 dependencies for Cantera wheel builds. On macOS, we support `szip` (via `libaec`) and `zlib`. On Windows, we only support `zlib`. diff --git a/cibw_before_all_macos.sh b/cibw_before_all_macos.sh new file mode 100644 index 0000000..c51d2c1 --- /dev/null +++ b/cibw_before_all_macos.sh @@ -0,0 +1,146 @@ +#!/bin/bash +# Copied from h5py. Licensed under the BSD 3-Clause license. +# Copyright (c) 2008 Andrew Collette and contributors +# All rights reserved. + +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: + +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. + +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the +# distribution. + +# 3. Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. + +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
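+
+# This before-all script builds the HDF5 stack that the macOS wheels link against:
+# it compiles zlib, libaec (for szip support), and HDF5 from source into a
+# per-version, per-arch cache directory under the path passed as the first
+# argument, then records HDF5_DIR, ZLIB_DIR, LIBAEC_DIR, MACOSX_DEPLOYMENT_TARGET,
+# and DYLD_FALLBACK_LIBRARY_PATH in GITHUB_ENV so later workflow steps can locate
+# the built libraries. When a cached libhdf5.dylib is already present, the build
+# is skipped entirely.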
+ +set -eo pipefail + +function setup_github_env { + if [[ "$GITHUB_ENV" != "" ]]; then + echo "HDF5_DIR=${HDF5_DIR}" | tee -a $GITHUB_ENV + echo "LIBAEC_DIR=${LIBAEC_DIR}" | tee -a $GITHUB_ENV + echo "ZLIB_DIR=${ZLIB_DIR}" | tee -a $GITHUB_ENV + echo "LD_LIBRARY_PATH=${LD_LIBRARY_PATH}" | tee -a $GITHUB_ENV + echo "MACOSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET}" | tee -a $GITHUB_ENV + echo "DYLD_FALLBACK_LIBRARY_PATH=${HDF5_DIR}/lib:${ZLIB_DIR}/lib:${LIBAEC_DIR}/lib" | tee -a $GITHUB_ENV + fi +} + +set +x + +if [[ "$1" == "" ]] ; then + echo "Usage: $0 " + exit 1 +fi +PROJECT_PATH="$1" +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) +ARCH=$(uname -m) + +ZLIB_VERSION="1.3.1" +LIBAEC_VERSION="1.0.6" + +HDF5_VERSION="1.14.4.3" +# Replace the last dot with a dash because that's what some of the files in this +# release have done. +HDF5_PATCH_VERSION=${HDF5_VERSION%.*}-${HDF5_VERSION##*.} + +HDF5_DIR="${PROJECT_PATH}/cache/hdf5/${HDF5_VERSION}-${ARCH}" +ZLIB_DIR="${PROJECT_PATH}/cache/zlib/${ZLIB_VERSION}-${ARCH}" +LIBAEC_DIR="${PROJECT_PATH}/cache/libaec/${LIBAEC_VERSION}-${ARCH}" + +LD_LIBRARY_PATH="${ZLIB_DIR}/lib:${LD_LIBRARY_PATH}" + +# When compiling HDF5, we should use the minimum across all Python versions for a given +# arch, for versions see for example a more updated version of the following: +# https://github.com/pypa/cibuildwheel/blob/89a5cfe2721c179f4368a2790669e697759b6644/cibuildwheel/macos.py#L296-L310 +if [[ "${ARCH}" == "arm64" ]]; then + export MACOSX_DEPLOYMENT_TARGET="11.0" +else + # This is the minimum version for Cantera + export MACOSX_DEPLOYMENT_TARGET="10.15" +fi + +lib_name=libhdf5.dylib + +if [ -f ${HDF5_DIR}/lib/${lib_name} ]; then + echo "using cached build" + setup_github_env + exit 0 +else + echo "building HDF5" +fi + +brew install ninja cmake + +pushd ${PROJECT_PATH} + +curl -fsSLO "https://github.com/madler/zlib/releases/download/v${ZLIB_VERSION}/zlib-${ZLIB_VERSION}.tar.gz" +tar -xzf zlib-${ZLIB_VERSION}.tar.gz + +mkdir -p zlib-${ZLIB_VERSION}/build +pushd zlib-${ZLIB_VERSION}/build +cmake -G Ninja \ + -DCMAKE_INSTALL_PREFIX=${ZLIB_DIR} \ + -DZLIB_BUILD_EXAMPLES=OFF \ + .. + +ninja install +popd + +curl -fsSLO "https://gitlab.dkrz.de/k202009/libaec/uploads/45b10e42123edd26ab7b3ad92bcf7be2/libaec-${LIBAEC_VERSION}.tar.gz" +tar -xzf libaec-${LIBAEC_VERSION}.tar.gz +mkdir -p libaec-${LIBAEC_VERSION}/build +pushd libaec-${LIBAEC_VERSION} +patch -p0 < ${SCRIPT_DIR}/libaec_cmakelists.patch +pushd build + +cmake -G Ninja \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=${LIBAEC_DIR} \ + -DBUILD_TESTING=OFF \ + .. + +ninja install +popd +popd + +curl -fsSLO "https://github.com/HDFGroup/hdf5/releases/download/hdf5_${HDF5_VERSION}/hdf5-${HDF5_PATCH_VERSION}.tar.gz" +tar -xzf hdf5-${HDF5_PATCH_VERSION}.tar.gz +mkdir -p hdf5-${HDF5_PATCH_VERSION}/build +pushd hdf5-${HDF5_PATCH_VERSION}/build + +cmake -G Ninja \ + -DCMAKE_BUILD_TYPE=Release \ + -DZLIB_ROOT=${ZLIB_DIR} \ + -Dlibaec_ROOT=${LIBAEC_DIR} \ + -DCMAKE_INSTALL_PREFIX=${HDF5_DIR} \ + -DHDF5_ENABLE_Z_LIB_SUPPORT=ON \ + -DHDF5_ENABLE_SZIP_SUPPORT=ON \ + -DHDF5_BUILD_EXAMPLES=OFF \ + -DBUILD_TESTING=OFF \ + .. + +ninja install +popd + +setup_github_env + +set -x diff --git a/cibw_before_all_windows.sh b/cibw_before_all_windows.sh new file mode 100644 index 0000000..a03f8fa --- /dev/null +++ b/cibw_before_all_windows.sh @@ -0,0 +1,68 @@ +#!/bin/bash +# Copied from h5py. Licensed under the BSD 3-Clause license. 
+# Copyright (c) 2008 Andrew Collette and contributors +# All rights reserved. + +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: + +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. + +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the +# distribution. + +# 3. Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. + +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +set -eo pipefail + +if [[ "$1" == "" ]] ; then + echo "Usage: $0 " + exit 1 +fi +PROJECT_PATH="$1" + +# nuget +nuget install zlib-msvc-x64 -ExcludeVersion -OutputDirectory "$PROJECT_PATH" +EXTRA_PATH="$PROJECT_PATH\zlib-msvc-x64\build\native\bin_release" +export PATH="$PATH:$EXTRA_PATH" +export CL="/I$PROJECT_PATH\zlib-msvc-x64\build\native\include" +export LINK="/LIBPATH:$PROJECT_PATH\zlib-msvc-x64\build\native\lib_release" +export ZLIB_ROOT="$PROJECT_PATH\zlib-msvc-x64\build\native" + +# HDF5 +export HDF5_VERSION="1.14.4.3" +export HDF5_VSVERSION="17-64" +export HDF5_DIR="$PROJECT_PATH/cache/hdf5/$HDF5_VERSION" + +python -m pip install requests +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) +python $SCRIPT_DIR/get_hdf5_win.py + +if [[ "$GITHUB_ENV" != "" ]] ; then + # PATH on windows is special + echo "$EXTRA_PATH" | tee -a $GITHUB_PATH + echo "CL=$CL" | tee -a $GITHUB_ENV + echo "LINK=$LINK" | tee -a $GITHUB_ENV + echo "ZLIB_ROOT=$ZLIB_ROOT" | tee -a $GITHUB_ENV + echo "HDF5_DIR=$HDF5_DIR" | tee -a $GITHUB_ENV + echo "HDF5_LIB_DIR=${HDF5_DIR}\lib" | tee -a $GITHUB_ENV + echo "ZLIB_LIB_DIR=${ZLIB_ROOT}\bin_release" | tee -a $GITHUB_ENV +fi diff --git a/get_hdf5_win.py b/get_hdf5_win.py new file mode 100644 index 0000000..eb46cfa --- /dev/null +++ b/get_hdf5_win.py @@ -0,0 +1,217 @@ +# Copied from h5py. Licensed under the BSD 3-Clause license. +# Copyright (c) 2008 Andrew Collette and contributors +# All rights reserved. + +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: + +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. + +# 2. 
Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the +# distribution. + +# 3. Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. + +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +Script for downloading and building HDF5 on Windows +This does not support MPI, nor non-Windows OSes + +This script may not completely clean up after itself, it is designed to run in a +CI environment which thrown away each time +""" + +from os import environ, makedirs, walk, getcwd, chdir +from os.path import join as pjoin, exists +from tempfile import TemporaryFile, TemporaryDirectory +from sys import exit, stderr +from shutil import copy +from glob import glob +from subprocess import run +from zipfile import ZipFile +import requests + +HDF5_URL = "https://github.com/HDFGroup/hdf5/releases/download/hdf5_{dotted_version}/hdf5-{dashed_version}.zip" +ZLIB_ROOT = environ.get("ZLIB_ROOT") + +CMAKE_CONFIGURE_CMD = [ + "cmake", + "-DBUILD_SHARED_LIBS:BOOL=ON", + "-DCMAKE_BUILD_TYPE:STRING=RELEASE", + "-DHDF5_BUILD_CPP_LIB=OFF", + "-DHDF5_BUILD_HL_LIB=ON", + "-DHDF5_BUILD_TOOLS:BOOL=OFF", + "-DBUILD_TESTING:BOOL=OFF", +] +if ZLIB_ROOT: + CMAKE_CONFIGURE_CMD += [ + "-DHDF5_ENABLE_Z_LIB_SUPPORT=ON", + f"-DZLIB_INCLUDE_DIR={ZLIB_ROOT}\\include", + f"-DZLIB_LIBRARY_RELEASE={ZLIB_ROOT}\\lib_release\\zlib.lib", + f"-DZLIB_LIBRARY_DEBUG={ZLIB_ROOT}\\lib_debug\\zlibd.lib", + ] +CMAKE_BUILD_CMD = ["cmake", "--build"] +CMAKE_INSTALL_ARG = ["--target", "install", "--config", "Release"] +CMAKE_INSTALL_PATH_ARG = "-DCMAKE_INSTALL_PREFIX={install_path}" +CMAKE_HDF5_LIBRARY_PREFIX = ["-DHDF5_EXTERNAL_LIB_PREFIX=h5py_"] +REL_PATH_TO_CMAKE_CFG = "hdf5-{version}" +DEFAULT_VERSION = "1.14.3.3" +VSVERSION_TO_GENERATOR = { + "9": "Visual Studio 9 2008", + "10": "Visual Studio 10 2010", + "14": "Visual Studio 14 2015", + "15": "Visual Studio 15 2017", + "16": "Visual Studio 16 2019", + "9-64": "Visual Studio 9 2008 Win64", + "10-64": "Visual Studio 10 2010 Win64", + "14-64": "Visual Studio 14 2015 Win64", + "15-64": "Visual Studio 15 2017 Win64", + "16-64": "Visual Studio 16 2019", + "17-64": "Visual Studio 17 2022", +} + + +def get_dashed_version(version): + dotted_version = version + if len(version.split(".")) > 3: + dashed_version = "-".join(version.rsplit(".", maxsplit=1)) + else: + dashed_version = version + return {"dotted_version": dotted_version, "dashed_version": dashed_version} + + +def download_hdf5(version, outfile): + file = HDF5_URL.format(**get_dashed_version(version)) + + print("Downloading " + file, file=stderr) 
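+    # Stream the release archive rather than loading it into memory at once: the
+    # response body is written chunk by chunk into the caller's temporary file,
+    # and any HTTP error aborts the script with a non-zero exit code.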
+ r = requests.get(file, stream=True) + try: + r.raise_for_status() + except requests.HTTPError: + print( + "Failed to download hdf5 version {version}, exiting".format( + version=version + ), + file=stderr, + ) + exit(1) + else: + for chunk in r.iter_content(chunk_size=None): + outfile.write(chunk) + + +def build_hdf5(version, hdf5_file, install_path, cmake_generator, use_prefix): + versions = get_dashed_version(version) + try: + with TemporaryDirectory() as hdf5_extract_path: + generator_args = ( + ["-G", cmake_generator] if cmake_generator is not None else [] + ) + prefix_args = CMAKE_HDF5_LIBRARY_PREFIX if use_prefix else [] + + with ZipFile(hdf5_file) as z: + z.extractall(hdf5_extract_path) + old_dir = getcwd() + + with TemporaryDirectory() as new_dir: + chdir(new_dir) + cfg_cmd = ( + CMAKE_CONFIGURE_CMD + + [ + get_cmake_install_path(install_path), + get_cmake_config_path( + versions["dashed_version"], hdf5_extract_path + ), + ] + + generator_args + + prefix_args + ) + print(f"Configuring HDF5 version {versions["dotted_version"]}...") + print(" ".join(cfg_cmd), file=stderr) + run(cfg_cmd, check=True) + + build_cmd = ( + CMAKE_BUILD_CMD + + [ + ".", + ] + + CMAKE_INSTALL_ARG + ) + print(f"Building HDF5 version {version}...") + print(" ".join(build_cmd), file=stderr) + run(build_cmd, check=True) + + print( + f"Installed HDF5 version {version} to {install_path}", file=stderr + ) + chdir(old_dir) + except OSError as e: + if e.winerror == 145: + print("Hit the rmtree race condition, continuing anyway...", file=stderr) + else: + raise + for f in glob(pjoin(install_path, "bin/*.dll")): + copy(f, pjoin(install_path, "lib")) + + +def get_cmake_config_path(version, extract_point): + return pjoin(extract_point, REL_PATH_TO_CMAKE_CFG.format(version=version)) + + +def get_cmake_install_path(install_path): + if install_path is not None: + return CMAKE_INSTALL_PATH_ARG.format(install_path=install_path) + return " " + + +def hdf5_install_cached(install_path): + if exists(pjoin(install_path, "lib", "hdf5.dll")): + return True + return False + + +def main(): + install_path = environ.get("HDF5_DIR") + version = environ.get("HDF5_VERSION", DEFAULT_VERSION) + vs_version = environ.get("HDF5_VSVERSION") + use_prefix = True if environ.get("H5PY_USE_PREFIX") is not None else False + + if install_path is not None: + if not exists(install_path): + makedirs(install_path) + if vs_version is not None: + cmake_generator = VSVERSION_TO_GENERATOR[vs_version] + else: + cmake_generator = None + + if not hdf5_install_cached(install_path): + with TemporaryFile() as f: + download_hdf5(version, f) + build_hdf5(version, f, install_path, cmake_generator, use_prefix) + else: + print("using cached hdf5", file=stderr) + if install_path is not None: + print("hdf5 files: ", file=stderr) + for dirpath, dirnames, filenames in walk(install_path): + for file in filenames: + print(" * " + pjoin(dirpath, file)) + + +if __name__ == "__main__": + main() diff --git a/libaec_cmakelists.patch b/libaec_cmakelists.patch new file mode 100644 index 0000000..71ae14c --- /dev/null +++ b/libaec_cmakelists.patch @@ -0,0 +1,11 @@ +--- CMakeLists.txt 2024-07-16 15:43:44.268427707 +0000 ++++ CMakeLists.txt.new 2024-07-16 15:43:38.540403952 +0000 +@@ -12,6 +12,8 @@ + include(TestBigEndian) + test_big_endian(WORDS_BIGENDIAN) + ++set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/bin) ++ + # Check for __builtin_clzll for faster decoding + include(CheckCSourceCompiles) + check_c_source_compiles(
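
For reference, the macOS leg of the build-wheels job above can be approximated locally. The sketch below is illustrative only and makes several assumptions that are not part of this patch: the extracted Cantera sdist and these scripts sit in the working directory, cibuildwheel plus the CMake/Ninja prerequisites are installed, Boost headers live at a placeholder path, and a single CPython version is built instead of the workflow's cp*-* selector.

#!/bin/bash
# Hypothetical dry run of the macOS wheel build; paths and the chosen CPython
# version are placeholders, not values taken from the workflow.
set -euo pipefail

WORK_DIR="$(mktemp -d)"                    # stands in for ${{ runner.temp }}
export GITHUB_ENV="$WORK_DIR/github_env"   # lets the script record HDF5_DIR, ZLIB_DIR, ...
touch "$GITHUB_ENV"

# Build zlib, libaec, and HDF5 into $WORK_DIR/cache (skipped if already cached).
bash ./cibw_before_all_macos.sh "$WORK_DIR"

# Pick up the variables the script wrote for later steps (plain KEY=VALUE lines).
set -a; source "$GITHUB_ENV"; set +a

export Boost_ROOT="/path/to/boost"         # placeholder; the workflow gets this from install-boost
export CIBW_BUILD="cp312-*"                # one interpreter instead of the workflow's cp*-*
export CIBW_ARCHS="arm64"
export CIBW_ENVIRONMENT_MACOS="CT_SKIP_SLOW=1"

python -m cibuildwheel --platform macos --output-dir wheelhouse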