diff --git a/.github/actions/install-parcels/action.yml b/.github/actions/install-parcels/action.yml
deleted file mode 100644
index 66a3bbccc3..0000000000
--- a/.github/actions/install-parcels/action.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-name: Setup Conda and install parcels
-description: >
-  In-repo composite action to setup Conda and install parcels. Installation of parcels relies on
-  `setup.py` file being available in the root. For general setup of Anaconda environments, just use
-  the `conda-incubator/setup-miniconda` action (setting C variables as required).
-inputs:
-  environment-file:
-    description: Conda environment file to use.
-    default: environment.yml
-  python-version:
-    description: Python version to use.
-    default: ""
-runs:
-  using: "composite"
-  steps:
-    - name: Configure pagefile # Windows compatability fix as per PR #1279
-      if: ${{ runner.os == 'Windows' }}
-      uses: al-cheb/configure-pagefile-action@v1.3
-      with:
-        minimum-size: 8GB
-    - name: Install miniconda (${{ inputs.environment-file }})
-      uses: conda-incubator/setup-miniconda@v3
-      with:
-        environment-file: ${{ inputs.environment-file }}
-        python-version: ${{ inputs.python-version }}
-        channels: conda-forge
-    - name: MPI support
-      if: ${{ ! (runner.os == 'Windows') }}
-      run: conda install -c conda-forge mpich mpi4py
-      shell: bash -el {0}
-    - name: Install parcels
-      run: pip install .
-      shell: bash -el {0}
diff --git a/.github/ci/min-core-deps.yml b/.github/ci/min-core-deps.yml
index 47f942fcdf..55c3f47a64 100644
--- a/.github/ci/min-core-deps.yml
+++ b/.github/ci/min-core-deps.yml
@@ -14,8 +14,6 @@ dependencies:
   # (see https://github.com/Unidata/netcdf4-python/issues/1090)
   - netcdf4=1.6
   - numpy=1.23
-  - psutil=5.9
-  - pymbolic=2022.1
   - pytest=7.1
   - scipy=1.9
   - tqdm=4.64
diff --git a/.github/ci/recipe.yaml b/.github/ci/recipe.yaml
index 70b1f4b3b8..5f4fa5ce9d 100644
--- a/.github/ci/recipe.yaml
+++ b/.github/ci/recipe.yaml
@@ -35,8 +35,6 @@ requirements:
     - netcdf4 >=1.1.9
     - numpy >=1.11
     - platformdirs
-    - psutil
-    - pymbolic
     - pytest
     - scipy >=0.16.0
     - trajan
diff --git a/.github/workflows/additional.yml b/.github/workflows/additional.yml
index 8be353f762..790a7a5bb0 100644
--- a/.github/workflows/additional.yml
+++ b/.github/workflows/additional.yml
@@ -29,7 +29,7 @@ jobs:
           python .github/ci/min_deps_check.py .github/ci/min-core-deps.yml
 
   linkcheck:
-    name: Sphinx linkcheck
+    name: pixi run docs-linkcheck
     runs-on: "ubuntu-latest"
     defaults:
       run:
@@ -37,8 +37,6 @@
     steps:
       - uses: actions/checkout@v4
-      - name: Setup parcels
-        uses: ./.github/actions/install-parcels
-        with:
-          environment-file: environment.yml
-      - run: sphinx-build -b linkcheck docs/ _build/linkcheck
+      - uses: actions/checkout@v4
+      - uses: prefix-dev/setup-pixi@v0.9.0
+      - run: pixi run docs-linkcheck
 
diff --git a/.github/workflows/cache-pixi-lock.yml b/.github/workflows/cache-pixi-lock.yml
new file mode 100644
index 0000000000..48fa1b72b5
--- /dev/null
+++ b/.github/workflows/cache-pixi-lock.yml
@@ -0,0 +1,49 @@
+name: Generate and cache Pixi lockfile
+
+on:
+  workflow_call:
+    outputs:
+      cache-id:
+        description: "Cache key for the generated pixi.lock lockfile"
+        value: ${{ jobs.cache-pixi-lock.outputs.cache-id }}
+
+jobs:
+  cache-pixi-lock:
+    name: Generate output
+    runs-on: ubuntu-latest
+    outputs:
+      cache-id: ${{ steps.restore.outputs.cache-primary-key }}
+    steps:
+      - uses: actions/checkout@v5
+        with:
+          fetch-depth: 0
+          sparse-checkout: pixi.toml
+      - name: Get current date
+        id: date
+        run: echo "date=$(date +'%Y-%m-%d')" >> "$GITHUB_OUTPUT"
+      - uses: actions/cache/restore@v4
+        id: restore
+        with:
+          path: |
+            pixi.lock
+          key: ${{ steps.date.outputs.date }}_${{hashFiles('pixi.toml')}}
+      - uses: prefix-dev/setup-pixi@v0.9.0
+        if: ${{ !steps.restore.outputs.cache-hit }}
+        with:
+          pixi-version: v0.49.0
+          run-install: false
+      - name: Run pixi lock
+        if: ${{ !steps.restore.outputs.cache-hit }}
+        run: pixi lock
+      - uses: actions/cache/save@v4
+        if: ${{ !steps.restore.outputs.cache-hit }}
+        id: cache
+        with:
+          path: |
+            pixi.lock
+          key: ${{ steps.restore.outputs.cache-primary-key }}
+      - name: Upload pixi.lock
+        uses: actions/upload-artifact@v4
+        with:
+          name: pixi-lock
+          path: pixi.lock
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8b1e05be70..27079c96d7 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -17,29 +17,38 @@ defaults:
     shell: bash -el {0}
 
 jobs:
+  cache-pixi-lock:
+    uses: ./.github/workflows/cache-pixi-lock.yml
   unit-test:
-    name: "py${{ matrix.python-version }} | ${{ matrix.os }} | unit tests"
+    name: "Unit tests: ${{ matrix.os }} | pixi run -e ${{ matrix.pixi-environment }} tests"
     runs-on: ${{ matrix.os }}-latest
+    needs: [cache-pixi-lock]
+    env:
+      COVERAGE_REPORT: "${{ matrix.os }}_${{ matrix.pixi-environment }}_unit_test_report.html"
     strategy:
       fail-fast: false
       matrix:
         os: [ubuntu] #, mac, windows] # TODO v4: Re-enable windows and mac
-        python-version: ["3.12"]
+        pixi-environment: [test-latest]
         include:
           - os: ubuntu
-            python-version: "3.11"
+            pixi-environment: "test-py311"
     steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Setup Conda and parcels
-        uses: ./.github/actions/install-parcels
+      - uses: actions/checkout@v4
+      - name: Restore cached pixi lockfile
+        uses: actions/cache/restore@v4
+        id: restore-pixi-lock
+        with:
+          path: |
+            pixi.lock
+          key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
+      - uses: prefix-dev/setup-pixi@v0.9.0
         with:
-          environment-file: environment.yml
-          python-version: ${{ matrix.python-version }}
+          cache: true
+          cache-write: ${{ github.event_name == 'push' && github.ref_name == 'v4-dev' }} # TODO: Update v4-dev to main when v4 is released
       - name: Unit test
         run: |
-          coverage run -m pytest -v -s --html=${{ matrix.os }}_${{ matrix.python-version }}_unit_test_report.html --self-contained-html
-          coverage xml
+          pixi run -e ${{ matrix.pixi-environment }} tests -v -s --cov=parcels --cov-report=xml --html="${{ env.COVERAGE_REPORT }}" --self-contained-html
       - name: Codecov
         uses: codecov/codecov-action@v5.3.1
         env:
@@ -50,13 +59,16 @@
         if: ${{ always() }} # Always run this step, even if tests fail
         uses: actions/upload-artifact@v4
         with:
-          name: Unittest report ${{ matrix.os }}-${{ matrix.python-version }}
-          path: ${{ matrix.os }}_${{ matrix.python-version }}_unit_test_report.html
+          name: Unittest report ${{ matrix.os }}-${{ matrix.pixi-environment }}
+          path: ${{ env.COVERAGE_REPORT }}
 
   integration-test: # TODO v4: Re-enable the workflow once development has stabilized and we want to run integration tests again
     if: false
-    name: "py${{ matrix.python-version }} | ${{ matrix.os }} | integration tests"
+    name: "Integration: ${{ matrix.os }} | pixi run -e ${{ matrix.pixi-environment }} tests-notebooks"
     runs-on: ${{ matrix.os }}-latest
+    needs: [cache-pixi-lock]
+    env:
+      COVERAGE_REPORT: "${{ matrix.os }}_${{ matrix.pixi-environment }}_integration_test_report.html"
     strategy:
       fail-fast: false
       matrix:
@@ -66,22 +78,21 @@
           - os: ubuntu
             python-version: "3.11"
     steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Setup Conda and parcels
-        uses: ./.github/actions/install-parcels
+      - uses: actions/checkout@v4
+      - name: Restore cached pixi lockfile
+        uses: actions/cache/restore@v4
+        id: restore-pixi-lock
         with:
-          environment-file: environment.yml
+          path: |
+            pixi.lock
+          key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
+      - uses: prefix-dev/setup-pixi@v0.9.0
+        with:
+          cache: true
+          cache-write: ${{ github.event_name == 'push' && github.ref_name == 'v4-dev' }} # TODO: Update v4-dev to main when v4 is released
       - name: Integration test
-        # TODO v4: Re-enable `tutorial_periodic_boundaries`
-        # TODO v4: Re-enable `tutorial_timevaryingdepthdimensions`
-        # TODO v4: Re-enable `tutorial_particle_field_interaction`
-        # TODO v4: Re-enable `tutorial_croco_3D`
-        # TODO v4: Re-enable `tutorial_nemo_3D` (https://github.com/OceanParcels/Parcels/pull/1936#issuecomment-2717666705)
-        # TODO v4: Re-enable `tutorial_analyticaladvection`
         run: |
-          coverage run -m pytest -v -s --nbval-lax -k "not documentation and not tutorial_periodic_boundaries and not tutorial_timevaryingdepthdimensions and not tutorial_particle_field_interaction and not tutorial_croco_3D and not tutorial_nemo_3D and not tutorial_analyticaladvection" --html="${{ matrix.os }}_${{ matrix.python-version }}_integration_test_report.html" --self-contained-html docs/examples
-          coverage xml
+          pixi run tests-notebooks -v -s --html="${{ env.COVERAGE_REPORT }}" --self-contained-html --cov=parcels --cov-report=xml
       - name: Codecov
         uses: codecov/codecov-action@v5.3.1
         env:
@@ -92,8 +103,8 @@
         if: ${{ always() }} # Always run this step, even if tests fail
         uses: actions/upload-artifact@v4
         with:
-          name: Integration test report ${{ matrix.os }}-${{ matrix.python-version }}
-          path: ${{ matrix.os }}_${{ matrix.python-version }}_integration_test_report.html
+          name: Integration test report ${{ matrix.os }}-${{ matrix.pixi-environment }}
+          path: ${{ env.COVERAGE_REPORT }}
   merge-test-artifacts:
     runs-on: ubuntu-latest
     needs:
@@ -107,21 +118,28 @@
           name: Testing reports
           pattern: "* report *"
   typechecking:
-    name: mypy
+    name: "TypeChecking: pixi run typing"
     # TODO v4: Enable typechecking again
     if: false
     runs-on: ubuntu-latest
+    needs: [cache-pixi-lock]
     steps:
      - name: Checkout
        uses: actions/checkout@v4
-      - name: Setup Conda and parcels
-        uses: ./.github/actions/install-parcels
+      - name: Restore cached pixi lockfile
+        uses: actions/cache/restore@v4
+        id: restore-pixi-lock
+        with:
+          path: |
+            pixi.lock
+          key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
+      - uses: prefix-dev/setup-pixi@v0.9.0
         with:
-          environment-file: environment.yml
-      - run: conda install lxml # dep for report generation
+          cache: true
+          cache-write: ${{ github.event_name == 'push' && github.ref_name == 'v4-dev' }} # TODO: Update v4-dev to main when v4 is released
       - name: Typechecking
         run: |
-          mypy --install-types --non-interactive parcels --html-report mypy-report
+          pixi run typing --non-interactive --html-report mypy-report
       - name: Upload test results
         if: ${{ always() }} # Upload even on mypy error
         uses: actions/upload-artifact@v4
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ee15347d81..cf189b2f21 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -43,3 +43,14 @@
     rev: 0.8.1
     hooks:
       - id: nbstripout
+  - repo: https://github.com/ComPWA/taplo-pre-commit
+    rev: v0.9.3
+    hooks:
+      - id: taplo-format
+        args:
+          [
+            "--option",
+            "array_auto_collapse=false",
+            "--option",
+            "align_comments=false",
+          ]
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index eadff00604..5056d4a9bb 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -1,13 +1,15 @@
 version: 2
 build:
-  os: ubuntu-22.04
-  tools:
-    python: mambaforge-22.9
+  os: ubuntu-lts-latest
   jobs:
-    post_create_environment:
-      - pip install -e .
+    create_environment:
+      - asdf plugin add pixi
+      - asdf install pixi latest
+      - asdf global pixi latest
+    install:
+      - pixi install -e docs
+    build:
+      html:
+        - pixi run -e docs sphinx-build -T -b html docs $READTHEDOCS_OUTPUT/html
 sphinx:
   configuration: docs/conf.py
-
-conda:
-  environment: environment.yml
diff --git a/README.md b/README.md
index d93147a448..d6f6831c9c 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,6 @@
 ## Parcels
 
+[![Pixi Badge](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/prefix-dev/pixi/main/assets/badge/v0.json)](https://pixi.sh)
 [![Anaconda-release](https://anaconda.org/conda-forge/parcels/badges/version.svg)](https://anaconda.org/conda-forge/parcels/)
 [![Anaconda-date](https://anaconda.org/conda-forge/parcels/badges/latest_release_date.svg)](https://anaconda.org/conda-forge/parcels/)
 [![Zenodo](https://zenodo.org/badge/DOI/10.5281/zenodo.823561.svg)](https://doi.org/10.5281/zenodo.823561)
diff --git a/environment.yml b/environment.yml
deleted file mode 100644
index 3511e74a67..0000000000
--- a/environment.yml
+++ /dev/null
@@ -1,53 +0,0 @@
-name: parcels
-channels:
-  - conda-forge
-dependencies: #! Keep in sync with [tool.pixi.dependencies] in pyproject.toml
-  - python>=3.11, <3.13
-  - ffmpeg>=3.2.3
-  - jupyter
-  - matplotlib-base>=2.0.2
-  - netcdf4>=1.1.9
-  - numpy>=1.9.1
-  - psutil
-  - pymbolic
-  - scipy>=0.16.0
-  - tqdm
-  - xarray>=0.10.8
-  - cftime>=1.3.1
-  - dask>=2.0
-  - scikit-learn
-  - zarr>=2.11.0,!=2.18.0,<3
-  - uxarray>=2025.3.0
-  - xgcm>=0.9.0
-  - pooch
-  - cf_xarray
-
-  # Notebooks
-  - trajan
-
-  # Testing
-  - nbval
-  - pytest
-  - pytest-html
-  - coverage
-  - hypothesis
-
-  # Typing
-  - mypy
-  - lxml # in CI
-  - types-tqdm
-  - types-psutil
-
-  # Linting
-  - pre_commit
-
-  # Docs
-  - ipython
-  - numpydoc!=1.9.0
-  - nbsphinx
-  - sphinx
-  - pandoc
-  - pydata-sphinx-theme
-  - sphinx-autobuild
-  - myst-parser
-  - sphinxcontrib-mermaid
diff --git a/pixi.toml b/pixi.toml
new file mode 100644
index 0000000000..58f5a6a5ee
--- /dev/null
+++ b/pixi.toml
@@ -0,0 +1,94 @@
+[workspace]
+name = "Parcels"
+preview = ["pixi-build"]
+channels = ["conda-forge"]
+platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"]
+
+[package]
+name = "parcels"
+version = "dynamic" # dynamic versioning needs better support in pixi https://github.com/prefix-dev/pixi/issues/2923#issuecomment-2598460666 . Putting `version = "dynamic"` here for now until pixi recommends something else.
+
+[package.build]
+backend = { name = "pixi-build-python", version = "==0.3.2" }
+
+[package.host-dependencies]
+setuptools = "*"
+setuptools_scm = "*"
+
+[environments]
+test-latest = { features = ["test"], solve-group = "test" }
+test-py311 = { features = ["test", "py311"] }
+test-py312 = { features = ["test", "py312"] }
+test-notebooks = { features = ["test", "notebooks"], solve-group = "test" }
+docs = { features = ["docs"], solve-group = "docs" }
+typing = { features = ["typing"], solve-group = "typing" }
+pre-commit = { features = ["pre-commit"], no-default-feature = true }
+
+[dependencies] # keep section in sync with pyproject.toml dependencies
+python = ">=3.11,<3.13"
+netcdf4 = ">=1.1.9"
+numpy = ">=1.9.1"
+tqdm = "*"
+xarray = ">=0.10.8"
+uxarray = ">=2025.3.0"
+dask = ">=2.0"
+scikit-learn = "*"
+zarr = ">=2.11.0,!=2.18.0,<3"
+xgcm = ">=0.9.0"
+cf_xarray = "*"
+cftime = ">=1.3.1"
+scipy = ">=0.16.0" #? Not sure if we rely on scipy internally anymore...
+pooch = "*"
+
+[feature.py311.dependencies]
+python = "3.11.*"
+
+[feature.py312.dependencies]
+python = "3.12.*"
+
+[feature.test.dependencies]
+nbval = "*"
+pytest = "*"
+hypothesis = "*"
+pytest-html = "*"
+pytest-cov = "*"
+
+[feature.test.tasks]
+tests = "pytest"
+tests-notebooks = "pytest --nbval-lax -k 'argo' docs/examples"
+
+
+[feature.notebooks.dependencies]
+jupyter = "*"
+trajan = "*"
+matplotlib-base = ">=2.0.2"
+
+[feature.docs.dependencies]
+numpydoc = "!=1.9.0"
+nbsphinx = "*"
+ipython = "*"
+sphinx = "*"
+pandoc = "*"
+pydata-sphinx-theme = "*"
+sphinx-autobuild = "*"
+myst-parser = "*"
+sphinxcontrib-mermaid = "*"
+
+[feature.docs.tasks]
+docs = "sphinx-build docs docs/_build"
+docs-watch = 'sphinx-autobuild --ignore "*.zip" docs docs/_build'
+docs-linkcheck = "sphinx-build -b linkcheck docs/ docs/_build/linkcheck"
+
+[feature.pre-commit.dependencies]
+pre_commit = "*"
+
+[feature.pre-commit.tasks]
+lint = "pre-commit run --all-files"
+
+[feature.typing.dependencies]
+mypy = "*"
+lxml = "*" # in CI
+types-tqdm = "*"
+
+[feature.typing.tasks]
+typing = "mypy parcels --install-types"
diff --git a/pyproject.toml b/pyproject.toml
index c4fefdf097..06436fb899 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,9 +7,9 @@ name = "parcels"
 description = "Framework for Lagrangian tracking of virtual ocean particles in the petascale age."
 readme = "README.md"
 dynamic = ["version"]
-authors = [{name = "oceanparcels.org team"}]
+authors = [{ name = "Parcels team" }]
 requires-python = ">=3.11,<3.13"
-license = {file = "LICENSE.md"}
+license = { file = "LICENSE.md" }
 classifiers = [
     "License :: OSI Approved :: MIT License",
     "Programming Language :: Python :: 3.11",
@@ -23,11 +23,9 @@ dependencies = [
     "cftime",
     "numpy >=1.11",
     "dask",
-    "psutil",
     "netCDF4 >=1.1.9",
     "zarr >=2.11.0,!=2.18.0,<3",
     "tqdm",
-    "pymbolic",
     "pytest",
     "scipy >=0.16.0",
     "xarray >=0.10.8",
@@ -42,75 +40,6 @@ homepage = "https://oceanparcels.org/"
 repository = "https://github.com/OceanParcels/parcels"
 Tracker = "https://github.com/OceanParcels/parcels/issues"
 
-[tool.pixi.project]
-channels = ["conda-forge"]
-name = "parcels-dev"
-platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"]
-
-[tool.pixi.tasks]
-tests = "pytest"
-tests-notebooks = "pytest -v -s --nbval-lax -k 'not documentation and not tutorial_periodic_boundaries and not tutorial_timevaryingdepthdimensions and not tutorial_particle_field_interaction and not tutorial_croco_3D and not tutorial_nemo_3D and not tutorial_analyticaladvection' docs/examples" # TODO v4: Mirror ci.yml for notebooks being run
-coverage = "coverage run -m pytest && coverage html"
-typing = "mypy parcels"
-pre-commit = "pre-commit run --all-files"
-docs = 'sphinx-autobuild --ignore "*.zip" docs docs/_build'
-
-# TODO v4: Remove v4 specific tasks
-tests-short = "pytest tests/test_advection.py"
-tests-exclude-v4 = "pytest -m 'not v4alpha and not v4future and not v4remove'"
-
-[tool.pixi.dependencies] #! Keep in sync with environment.yml
-python = ">=3.11"
-ffmpeg = ">=3.2.3"
-jupyter = "*"
-matplotlib-base = ">=2.0.2"
-netcdf4 = ">=1.1.9"
-numpy = ">=1.9.1"
-psutil = "*"
-pymbolic = "*"
-scipy = ">=0.16.0"
-tqdm = "*"
-xarray = ">=0.10.8"
-uxarray = ">=2025.3.0"
-cftime = ">=1.3.1"
-dask = ">=2.0"
-scikit-learn = "*"
-zarr = ">=2.11.0,!=2.18.0,<3"
-xgcm = ">=0.9.0"
-
-# Notebooks
-trajan = "*"
-
-# Testing
-nbval = "*"
-pytest = "*"
-hypothesis = "*"
-pytest-html = "*"
-coverage = "*"
-
-# Typing
-mypy = "*"
-lxml = "*" # in CI
-types-tqdm = "*"
-types-psutil = "*"
-
-# Linting
-pre_commit = "*"
-
-# Docs
-ipython = "*"
-numpydoc = "!=1.9.0"
-nbsphinx = "*"
-sphinx = "*"
-pandoc = "*"
-pydata-sphinx-theme = "*"
-sphinx-autobuild = "*"
-myst-parser = "*"
-sphinxcontrib-mermaid = "*"
-cf_xarray = "*"
-
-[tool.pixi.pypi-dependencies]
-parcels = { path = ".", editable = true }
 
 [tool.setuptools]
 packages = ["parcels"]
@@ -143,58 +72,58 @@ line-length = 120
 
 [tool.ruff.lint]
 select = [
-    "D",  # pydocstyle
-    "E",  # Error
-    "F",  # pyflakes
-    "I",  # isort
-    "B",  # Bugbear
-    "UP",  # pyupgrade
-    "LOG",  # logging
-    "ICN",  # import conventions
-    "G",  # logging-format
-    "RUF",  # ruff
-    "ISC001",  # single-line-implicit-string-concatenation
-    "TID",  # flake8-tidy-imports
-    "T100",  # Checks for the presence of debugger calls and imports
+  "D", # pydocstyle
+  "E", # Error
+  "F", # pyflakes
+  "I", # isort
+  "B", # Bugbear
+  "UP", # pyupgrade
+  "LOG", # logging
+  "ICN", # import conventions
+  "G", # logging-format
+  "RUF", # ruff
+  "ISC001", # single-line-implicit-string-concatenation
+  "TID", # flake8-tidy-imports
+  "T100", # Checks for the presence of debugger calls and imports
 ]
 
 ignore = [
-    # line too long (82 > 79 characters)
-    "E501",
-    # ‘from module import *’ used; unable to detect undefined names
-    "F403",
-    # Mutable class attributes should be annotated with `typing.ClassVar`
-    "RUF012",
-    # Consider `(slice(2), *block)` instead of concatenation
-    "RUF005",
-    # Prefer `next(iter(variable.items()))` over single element slice
-    "RUF015",
-    # Use `X | Y` in `isinstance` (see https://github.com/home-assistant/core/issues/123850)
-    "UP038",
-    "RUF046", # Value being cast to `int` is already an integer
-
-    # TODO: ignore for now (requires more work). Remove ignore once fixed
-    # Missing docstring in public module
-    "D100",
-    # Missing docstring in public class
-    "D101",
-    # Missing docstring in public method
-    "D102",
-    # Missing docstring in public function
-    "D103",
-    # Missing docstring in public package
-    "D104",
-    # Missing docstring in magic method
-    "D105",
-    # Missing docstring in __init__
-    "D400",
-    # First line should be in imperative mood (requires writing of summaries)
-    "D401",
-    # First word of the docstring should not be `This`
-    "D404",
-    # 1 blank line required between summary line and description (requires writing of summaries)
-    "D205",
-    "F811",
+  # line too long (82 > 79 characters)
+  "E501",
+  # ‘from module import *’ used; unable to detect undefined names
+  "F403",
+  # Mutable class attributes should be annotated with `typing.ClassVar`
+  "RUF012",
+  # Consider `(slice(2), *block)` instead of concatenation
+  "RUF005",
+  # Prefer `next(iter(variable.items()))` over single element slice
+  "RUF015",
+  # Use `X | Y` in `isinstance` (see https://github.com/home-assistant/core/issues/123850)
+  "UP038",
+  "RUF046", # Value being cast to `int` is already an integer
+
+  # TODO: ignore for now (requires more work). Remove ignore once fixed
+  # Missing docstring in public module
+  "D100",
+  # Missing docstring in public class
+  "D101",
+  # Missing docstring in public method
+  "D102",
+  # Missing docstring in public function
+  "D103",
+  # Missing docstring in public package
+  "D104",
+  # Missing docstring in magic method
+  "D105",
+  # Missing docstring in __init__
+  "D400",
+  # First line should be in imperative mood (requires writing of summaries)
+  "D401",
+  # First word of the docstring should not be `This`
+  "D404",
+  # 1 blank line required between summary line and description (requires writing of summaries)
+  "D205",
+  "F811",
 ]
 
 [tool.ruff.lint.pydocstyle]
@@ -214,14 +143,14 @@ files = [
 
 [[tool.mypy.overrides]]
 module = [
-    "parcels._version_setup",
-    "mpi4py",
-    "scipy.spatial",
-    "sklearn.cluster",
-    "zarr",
-    "cftime",
-    "pykdtree.kdtree",
-    "netCDF4",
-    "pooch",
+  "parcels._version_setup",
+  "mpi4py",
+  "scipy.spatial",
+  "sklearn.cluster",
+  "zarr",
+  "cftime",
+  "pykdtree.kdtree",
+  "netCDF4",
+  "pooch",
 ]
 ignore_missing_imports = true