Build, test, and Push to PyPI #223
Workflow file for this run

# This workflow builds and tests the package, then uploads it to PyPI using Twine
# when triggered manually (workflow_dispatch)
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
name: Build, test, and Push to PyPI
on: [workflow_dispatch]
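# Two jobs: build_test builds and tests the sdist and per-platform wheels across the
# OS/Python matrix; send_to_pypi then publishes everything that job uploaded.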
jobs:
  build_test:
    name: Build and Test
    strategy:
      matrix:
        python-version: ["3.10", "3.11", "3.12"]
        os: ["ubuntu-latest", "macos-13", "macos-14"] # windows fails; macos-13 is x86_64 and macos-latest (= macos-14) is arm64
        # monitor https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
      - name: Setup miniconda
        uses: conda-incubator/setup-miniconda@v3
        with:
          python-version: ${{ matrix.python-version }}
          mamba-version: "1.5"
          activate-environment: build_test_env
          channels: conda-forge
          channel-priority: true
      - name: Install build and test dependencies
        shell: bash -el {0}
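        # bash -el {0}: -e aborts the step on the first failing command, -l starts a
        # login shell so the conda environment activated by setup-miniconda is on PATH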
        run: |
          # conda install mamba -c conda-forge
          mamba install python=${{ matrix.python-version }} pip -c conda-forge
          ## since we split arm and x86_64 runners, conda's non-fat tbb builds are fine
          mamba install boost-cpp gudhi=3.10 numpy=2 pot cython=3 pytest scikit-learn matplotlib boost joblib tqdm scipy tbb tbb-devel -c conda-forge
          pip install pykeops filtration-domination --upgrade
          mamba install cxx-compiler=1.6
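          # note: ${{ matrix.os }} is substituted by the workflow runner before bash
          # executes, so the [[ ... =~ ... ]] checks below compare against a literal string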
          if [[ ${{ matrix.os }} =~ "ubuntu-".* ]]; then
            # oldest supported gcc release; see https://gcc.gnu.org/
            mamba install patchelf -c conda-forge
            gcc --version
          fi
      - name: Build package
        shell: bash -el {0}
        run: |
          python setup.py sdist
          if [[ ${{ matrix.os }} =~ "macos-".* ]]; then
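            # -fno-aligned-new avoids C++17 aligned operator new availability errors
            # against the macOS deployment target (assumed rationale for this workaround)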
export CXXFLAGS="-fno-aligned-new" # Macos workaround
export CFLAGS="-fno-aligned-new" # Macos workaround
## should compile for both architecture
## initialize below ?
export MACOSX_DEPLOYMENT_TARGET="13.0"
# export _PYTHON_HOST_PLATFORM="macosx-13.0-universal2"
fi
          if [[ ${{ matrix.os }} =~ "ubuntu-".* ]]; then
            export CXXFLAGS="-march=x86-64-v3" # post-Haswell, for AVX
            export CFLAGS="-march=x86-64-v3"
          fi
          python setup.py build_ext -j4 --inplace bdist_wheel
      - name: Fix wheels on Linux and macOS
        shell: bash -el {0}
        run: |
          if [[ ${{ matrix.os }} =~ "ubuntu-".* ]]; then
            cd dist
            pip install auditwheel --upgrade
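            # auditwheel repair bundles the external shared libraries the wheel links
            # against and retags it for the manylinux_2_34_x86_64 platform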
            for wheel_file in multipers*linux*.whl; do
              auditwheel show $wheel_file
              auditwheel repair $wheel_file --plat manylinux_2_34_x86_64
              rm $wheel_file
            done
            mv wheelhouse/*.whl . # retrieves repaired wheels
            cd ..
          fi
if [[ ${{ matrix.os }} == "macos-13" ]]; then
cd dist
pip install delocate --upgrade
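            # delocate-wheel copies the non-system dylibs a wheel depends on into the
            # wheel itself, so the published wheel is self-contained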
            for wheel_file in multipers*macosx*x86*.whl; do
              delocate-listdeps $wheel_file
              delocate-wheel --require-archs x86_64 -w wheelhouse -v $wheel_file
              rm $wheel_file
            done
            mv wheelhouse/*.whl . # retrieves repaired wheels
            cd ..
          fi
if [[ ${{ matrix.os }} == "macos-14" ]]; then
cd dist
pip install delocate --upgrade
for wheel_file in multipers*macosx*arm64*.whl; do
delocate-listdeps $wheel_file
delocate-wheel --require-archs arm64 -w wheelhouse -v $wheel_file
rm $wheel_file
done
mv wheelhouse/*.whl . # retrieves repaired wheels
cd ..
fi
      - name: Install and Test
        shell: bash -el {0}
        run: |
          pip install dist/*.whl --force-reinstall ## tests the repaired wheel on a "fresh" env
          pytest multipers/tests
      - name: Upload sources and wheel
        uses: actions/upload-artifact@v3
        with:
          name: sources
          path: dist
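          # every matrix job uploads into the same "sources" artifact; with
          # upload-artifact@v3 these uploads are merged, so send_to_pypi downloads the
          # sdist and all platform wheels in one go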
  send_to_pypi:
    name: Send sources and wheels
    runs-on: ubuntu-latest
    needs: [ build_test ]
    steps:
      - uses: actions/download-artifact@v3
        with:
          name: sources
      - uses: actions/setup-python@v4
        with:
          python-version: '3.11'
      - name: Publish to PyPI
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
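          # __token__ as the username tells twine to authenticate with a PyPI API token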
        run: |
          pip install --upgrade pip twine
          twine upload --skip-existing *.tar.gz # sources
          twine upload --skip-existing *.whl # wheels