Workflow optimisations #1859

Merged
18 commits merged on Aug 1, 2024

99 changes: 56 additions & 43 deletions .github/workflows/main.yml
@@ -1,4 +1,4 @@
name: xclim Testing Suite
name: Testing

on:
push:
@@ -69,15 +69,13 @@ jobs:
python -m tox -e lint

test-preliminary:
name: Python${{ matrix.python-version }} (${{ matrix.tox-env }}, ${{ matrix.os }})
name: Python${{ matrix.python-version }} (ubuntu-latest)
needs: lint
runs-on: ${{ matrix.os }}
runs-on: ubuntu-latest
strategy:
matrix:
include:
- tox-env: "py39-coverage"
python-version: "3.9"
os: ubuntu-latest
python-version:
- "3.9"
steps:
- name: Harden Runner
uses: step-security/harden-runner@0d381219ddf674d61a7572ddd19d7941e271515c # v2.9.0
@@ -102,54 +100,58 @@ jobs:
python -m pip install --require-hashes -r CI/requirements_ci.txt
- name: Test with tox
run: |
python -m tox -e ${{ matrix.tox-env }}
python -m tox -- -m 'not slow'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

test-pypi:
needs: lint
name: Python${{ matrix.python-version }} (${{ matrix.tox-env }}, ${{ matrix.os }})
name: Python${{ matrix.python-version }} (${{ matrix.os }}, ${{ matrix.tox-env }})
if: |
contains(github.event.pull_request.labels.*.name, 'approved') ||
(github.event.review.state == 'approved') ||
(github.event_name == 'push')
runs-on: ${{ matrix.os }}
timeout-minutes: 20
strategy:
matrix:
include:
# Windows builds
- tox-env: py39-coverage-prefetch
python-version: "3.9"
# Linux builds
- os: ubuntu-latest
markers: -m 'not slow'
os: windows-latest
# macOS builds
- tox-env: py310-coverage-extras-lmoments-numpy
python-version: "3.10"
tox-env: standard
- os: ubuntu-latest
markers: -m 'not slow'
os: macos-latest
# Linux builds
- tox-env: py39-coverage-offline-prefetch
python-version: "3.9"
markers: -m 'not slow and not requires_internet'
os: ubuntu-latest
- tox-env: py310-coverage-lmoments # No markers -- includes slow tests
python-version: "3.10"
os: ubuntu-latest
- tox-env: py311-coverage-extras-sbck-numpy
python-version: "3.11"
tox-env: standard
- os: ubuntu-latest
markers: -m 'not slow'
os: ubuntu-latest
- tox-env: py312-coverage-extras-lmoments
python-version: "3.12"
tox-env: standard
# Windows builds
- os: windows-latest
markers: -m 'not slow'
os: ubuntu-latest
python-version: "3.9"
tox-env: py39-coverage-prefetch # Test data prefetch is needed for Windows
# macOS builds
- os: macos-latest
python-version: "3.10"
markers: '' # Slow tests
tox-env: py310-coverage-extras
# Specialized tests
- tox-env: notebooks
- os: ubuntu-latest
markers: -m 'not requires_internet and not slow'
python-version: "3.9"
tox-env: py39-coverage-offline-prefetch
- os: ubuntu-latest
markers: ''
python-version: "3.10"
os: ubuntu-latest
- tox-env: doctests
tox-env: notebooks
- os: ubuntu-latest
markers: ''
python-version: "3.12"
os: ubuntu-latest
tox-env: doctests
steps:
- name: Harden Runner
uses: step-security/harden-runner@0d381219ddf674d61a7572ddd19d7941e271515c # v2.9.0
@@ -168,9 +170,10 @@ jobs:
ppa.launchpadcontent.net:443
pypi.org:443
raw.githubusercontent.com:443
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Install Eigen3
if: contains(matrix.tox-env, 'sbck')
- name: Checkout Repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Install Eigen3 (SBCK)
if: ${{ matrix.python-version == '3.11' }}
run: |
sudo apt-get update
sudo apt-get install libeigen3-dev
@@ -182,26 +185,36 @@ jobs:
run: |
python -m pip install --require-hashes -r CI/requirements_ci.txt
- name: Test with tox
if: ${{ matrix.tox-env == 'standard' }}
run: |
python -m tox -- ${{ matrix.markers }};
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COVERALLS_FLAG_NAME: run-${{ matrix.python-version }}-${{ matrix.os }}-${{ matrix.tox-env }}
COVERALLS_PARALLEL: true
COVERALLS_SERVICE_NAME: github
- name: Test with tox (specialized tests)
if: ${{ matrix.tox-env != 'standard' }}
run: |
python -m tox -e ${{ matrix.tox-env }} -- ${{ matrix.markers }}
python -m tox -e ${{ matrix.tox-env }} -- ${{ matrix.markers }};
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COVERALLS_FLAG_NAME: run-{{ matrix.tox-env }}-{{ matrix.os }}
COVERALLS_FLAG_NAME: run-${{ matrix.python-version }}-${{ matrix.os }}-${{ matrix.tox-env }}
COVERALLS_PARALLEL: true
COVERALLS_SERVICE_NAME: github

test-conda:
needs: lint
name: Python${{ matrix.python-version }} (Conda, ${{ matrix.os }})
name: Python${{ matrix.python-version }} (${{ matrix.os }}, conda)
if: |
contains(github.event.pull_request.labels.*.name, 'approved') ||
(github.event.review.state == 'approved') ||
(github.event_name == 'push')
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest]
python-version: ["3.9", "3.12"]
os: [ ubuntu-latest ]
python-version: [ "3.9", "3.12" ]
defaults:
run:
shell: bash -l {0}
@@ -232,15 +245,15 @@ jobs:
environment-file: environment.yml
create-args: >-
python=${{ matrix.python-version }}
- name: Conda and Mamba versions
- name: Micromamba version
run: |
echo "micromamba: $(micromamba --version)"
- name: Install xclim
run: |
python -m pip install --no-user --editable .
- name: Check versions
run: |
conda list
micromamba list
xclim show_version_info
python -m pip check || true
- name: Test with pytest
@@ -263,7 +276,7 @@
coveralls
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COVERALLS_FLAG_NAME: run-{{ matrix.tox-env }}-opt-slow
COVERALLS_FLAG_NAME: run-{{ matrix.python-version }}-conda
COVERALLS_PARALLEL: true
COVERALLS_SERVICE_NAME: github

10 changes: 4 additions & 6 deletions .github/workflows/upstream.yml
@@ -24,7 +24,6 @@ jobs:
name: test-upstream-dev (Python${{ matrix.python-version }})
runs-on: ubuntu-latest
permissions:
contents: read
issues: write
if: |
(github.event_name == 'schedule') ||
@@ -65,25 +64,24 @@ jobs:
cache-environment: true
environment-file: environment.yml
create-args: >-
conda
eigen
pybind11
python=${{ matrix.python-version }}
pytest-reportlog
- name: Conda and Mamba versions
- name: Micromamba version
run: |
conda --version
echo "micromamba: $(micromamba --version)"
- name: Install upstream versions and SBCK
run: |
python -m pip install --require-hashes -r CI/requirements_upstream.txt
# git-based dependencies cannot be installed from hashes
python -m pip install -r CI/requirements_upstream.txt
python -m pip install "sbck @ git+https://github.com/yrobink/SBCK-python.git@master"
- name: Install xclim
run: |
python -m pip install --no-user --no-deps --editable .
- name: Check versions
run: |
conda list
micromamba list
xclim show_version_info
python -m pip check || true
- name: Run Tests
2 changes: 2 additions & 0 deletions CHANGELOG.rst
@@ -32,6 +32,8 @@ CI changes
* `pip-tools` (`pip-compile`) has been used to generate a lock file with hashes for the CI dependencies. (:pull:`1841`).
* The ``main.yml`` workflow has been updated to use simpler trigger logic. (:pull:`1841`).
* A workflow bug has been fixed that was causing multiple duplicate comments to be made on Pull Requests originating from forks. (:pull:`1841`).
* The ``upstream.yml`` workflow was adapted to not install upstream Python dependencies using hashes (impossible to install directly from GitHub sources using --require-hashes). (:pull:`1859`).
* The `tox-gh` configuration has been set to handle the environment configurations on GitHub Workflows. The tox.ini file is also a bit more organized/consistent. (:pull:`1859`).
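
  As a point of reference, the ``[gh]`` mapping referred to above takes roughly this shape in ``tox.ini`` (abridged here; the full section appears in the ``tox.ini`` diff further down)::

      [gh]
      python =
          3.9 = py39-coverage
          3.12 = py312-coverage-extras-lmoments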

v0.51.0 (2024-07-04)
--------------------
23 changes: 12 additions & 11 deletions CONTRIBUTING.rst
@@ -61,8 +61,8 @@ General notes for implementing new bias-adjustment methods:

* Method are implemented as classes in ``xclim/sdba/adjustment.py``.
* If the algorithm gets complicated and would generate many dask tasks, it should be implemented as functions wrapped by :py:func:`~xclim.sdba.map_blocks` or :py:func:`~xclim.sdba.map_groups` in ``xclim/sdba/_adjustment.py``.
* xclim doesn't implement monolithic multi-parameter methods, but rather smaller modular functions to construct post-processing workflows.
* If you are working on numba-accelerated function that use ``@guvectorize``, consider disabling caching during the development phase and reactivating it once all changes are ready for review. This is done by commenting ``cache=True`` in the decorator.
* `xclim` doesn't implement monolithic multi-parameter methods, but rather smaller modular functions to construct post-processing workflows.
* If you are working on a numba-accelerated function that uses ``@guvectorize``, consider disabling caching during the development phase and reactivating it once all changes are ready for review. This is done by commenting out the ``cache=True`` argument in the decorator.
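
As a quick illustration of the caching tip above, here is a minimal, made-up ``@guvectorize`` function with the cache flag commented out during development (the function name, signature, and body are purely illustrative and not an actual `xclim` kernel)::

    from numba import guvectorize

    @guvectorize(
        ["void(float64[:], float64[:], float64[:])"],
        "(n),(n)->(n)",
        nopython=True,
        # cache=True,  # re-enable once the changes are ready for review
    )
    def _add_fields(a, b, out):
        # Element-wise sum, standing in for a real numba-accelerated kernel.
        for i in range(a.shape[0]):
            out[i] = a[i] + b[i]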

Report Bugs
~~~~~~~~~~~
@@ -83,7 +83,7 @@ Look through the GitHub issues for bugs. Anything tagged with "bug" and "help wa
Write Documentation
~~~~~~~~~~~~~~~~~~~

xclim could always use more documentation, whether as part of the official xclim docs, in docstrings, or even on the web in blog posts, articles, and such.
xclim could always use more documentation, whether as part of the official `xclim` docs, in docstrings, or even on the web in blog posts, articles, and such.

To reference documents (article, presentation, thesis, etc) in the documentation or in a docstring, xclim uses `sphinxcontrib-bibtex`_.
Metadata of the documents is stored as BibTeX entries in the ``docs/references.bib`` file.
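
For example, an entry in ``docs/references.bib`` could look like the following (a made-up entry for illustration only), which can then be cited from a docstring or documentation page with a `sphinxcontrib-bibtex`_ citation role such as ``:cite:``::

    @article{doe_example_2024,
        author  = {Doe, Jane},
        title   = {An Illustrative Reference},
        journal = {Journal of Examples},
        year    = {2024}
    }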
@@ -162,25 +162,26 @@ Ready to contribute? Here's how to set up `xclim` for local development.

Alternatively, one can use ``$ tox`` to run very specific testing configurations, as GitHub Workflows would do when a Pull Request is submitted and new commits are pushed::

$ tox -e py39 # run tests on Python 3.9
$ tox -e py310-upstream-doctest # run tests on Python 3.10, including doctests, with upstream dependencies
$ tox -e py311 -- -m "not slow # run tests on Python 3.11, excluding "slow" marked tests
$ tox -e py312-numba -- -m "not slow # run tests on Python 3.12, installing upstream `numba`, excluding "slow" marked tests
$ tox -e notebooks_doctests # run tests using the base Python on doctests and evaluate all notebooks
$ tox -e offline # run tests using the base Python, excluding tests requiring internet access
$ tox -e py39-coverage # run tests on Python 3.9, reporting code coverage
$ tox -e py310-upstream # run tests on Python 3.10, with upstream dependencies
$ tox -e py311-prefetch-offline -- -m "not slow" # run tests on Python 3.11, force download of testing data, ensure tests are all offline, exclude "slow" marked tests
$ tox -e py312-lmoments -- -m "not slow" # run tests on Python 3.12, installing lmoments3, excluding "slow" marked tests
$ tox -e notebooks,doctests # run the notebook-based tests, then run the doctests

$ tox -m test # run all builds listed above
$ tox -m test # run the standard tests used in GitHub Workflows

.. warning::

Starting from `xclim` v0.46.0, when running tests with `tox`, any `pytest` markers passed to `pyXX` builds (e.g. `-m "not slow"`) must be passed to `tox` directly. This can be done as follows::

$ tox -e py310 -- -m "not slow"

The exceptions to this rule are:
`notebooks_doctests`: this configuration does not pass test markers to its `pytest` call.
`notebooks` and `doctests`: these configurations do not pass test markers to their `pytest` calls.
`offline`: this configuration runs by default with the `-m "not requires_internet"` test marker. Be aware that running `tox` and manually setting a `pytest` marker will override this default.

.. note::

`xclim` tests are organized to support the `pytest-xdist`_ plugin for distributed testing across workers or CPUs.
In order to benefit from multiple processes, add the flag `--numprocesses=auto` or `-n auto` to your `pytest` calls.
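
For example (an illustrative invocation; marker selection and worker count are up to the developer, and the `tox` form assumes the environment forwards its positional arguments to `pytest`, as the configurations above do)::

    $ python -m pytest --numprocesses=auto -m "not slow"
    $ tox -e py310 -- -n auto -m "not slow"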

4 changes: 2 additions & 2 deletions environment.yml
@@ -72,8 +72,8 @@ dependencies:
- sphinx-mdinclude
- sphinxcontrib-bibtex
- tokenize-rt
- tox >=4.15.1
# - tox-conda # Will be added when a tox@v4.0+ compatible plugin is released.
- tox >=4.16.0
# - tox-conda # Will be added when a tox@v4.0+ compatible plugin is released.
- vulture # ==2.11 # The conda-forge version is out of date.
- xdoctest >=1.1.5
- yamllint
3 changes: 1 addition & 2 deletions pyproject.toml
@@ -85,9 +85,8 @@ dev = [
"pytest-xdist[psutil] >=3.2",
"ruff >=0.4.10",
"tokenize-rt",
"tox >=4.15.1",
"tox >=4.16.0",
# "tox-conda", # Will be added when a tox@v4.0+ compatible plugin is released.
"tox-gh >=1.3.1",
"vulture ==2.11",
"xdoctest >=1.1.5",
"yamllint ==1.35.1"
22 changes: 13 additions & 9 deletions tox.ini
@@ -1,26 +1,29 @@
[tox]
min_version = 4.15.1
min_version = 4.16.0
env_list =
lint
docs
notebooks
doctests
py39-upstream-doctest
py310-doctest
py311-lmoments
py39
py310-extras-numpy
py311-extras-sbck
py312-extras-lmoments
labels =
test = py39, py310-upstream-doctest, py311, notebooks_doctests, offline-prefetch
static = lint
test = py39, py310-extras-numpy, py311-extras-sbck, py312-extras-lmoments
special = docs, notebooks, doctests
requires =
pip >= 24.0
flit >=3.9
opts = -vv

[gh]
python =
3.12 = doctests
3.11 = py311-coverage-lmoments-sbck, offline-coverage-prefetch
3.10 = py310-coverage-lmoments, notebooks
3.9 = py39-coverage-sbck, lint, docs
3.9 = py39-coverage
3.10 = py310-coverage-extras-numpy
3.11 = py311-coverage-extras-sbck
3.12 = py312-coverage-extras-lmoments

[testenv:lint]
description = Run code quality compliance tests under {basepython}
@@ -120,6 +123,7 @@ deps =
install_command = python -m pip install --no-user {opts} {packages}
download = True
commands_pre =
sbck: python -c 'print("The sbck dependency requires the \"libeigen3-dev\" package to be installed on the system.")'
sbck: python -m pip install sbck
python -m pip list
xclim show_version_info