diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..7bca37f8f --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,15 @@ +version: 2 + +updates: + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + assignees: + - "xylar" + - "altheaden" + reviewers: + - "xylar" + - "altheaden" + diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..9732c9516 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,21 @@ + + + +Checklist +* [ ] User's Guide has been updated +* [ ] If this PR adds a new analysis task, it has also been added to the user's guide +* [ ] Developer's Guide has been updated +* [ ] API documentation in the Developer's Guide (`api.rst`) has any new or modified class, method and/or functions listed +* [ ] Documentation has been [built locally](https://mpas-dev.github.io/MPAS-Analysis/latest/users_guide/quick_start.html#generating-documentation) and changes look as expected +* [ ] `Testing` comment in the PR documents testing used to verify the changes + + + diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml new file mode 100644 index 000000000..687632e02 --- /dev/null +++ b/.github/workflows/build_workflow.yml @@ -0,0 +1,92 @@ +name: CI/CD Build Workflow + +on: + push: + branches: + - main + - develop + + pull_request: + branches: + - main + - develop + + workflow_dispatch: + +env: + CANCEL_OTHERS: false + PATHS_IGNORE: '["**/README.md", "**/docs/**"]' + +jobs: + build: + name: test mpas_analysis - python ${{ matrix.python-version }} + runs-on: ubuntu-latest + timeout-minutes: 20 + defaults: + run: + shell: bash -l {0} + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12"] + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@master + with: + cancel_others: ${{ env.CANCEL_OTHERS }} + paths_ignore: ${{ env.PATHS_IGNORE }} + + - if: ${{ 
steps.skip_check.outputs.should_skip != 'true' }}
+        uses: actions/checkout@v4
+
+      - if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
+        name: Cache Conda
+        uses: actions/cache@v4
+        env:
+          # Increase this value to reset cache if dev-spec.txt has not changed in the workflow
+          CACHE_NUMBER: 0
+        with:
+          path: ~/conda_pkgs_dir
+          key:
+            ${{ runner.os }}-${{ matrix.python-version }}-conda-${{ env.CACHE_NUMBER }}-${{
+            hashFiles('dev-spec.txt', 'setup.py') }}
+
+      - if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
+        name: Set up Conda Environment
+        uses: conda-incubator/setup-miniconda@v3
+        with:
+          activate-environment: "mpas_analysis_ci"
+          miniforge-version: latest
+          channels: conda-forge
+          channel-priority: strict
+          auto-update-conda: true
+          python-version: ${{ matrix.python-version }}
+
+      - if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
+        name: Install mpas_analysis
+        run: |
+          conda create -n mpas_analysis_dev --file dev-spec.txt \
+            python=${{ matrix.python-version }}
+          conda activate mpas_analysis_dev
+          python -m pip install --no-deps --no-build-isolation -vv -e .
+
+      - if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
+        name: Run Tests
+        env:
+          CHECK_IMAGES: False
+        run: |
+          set -e
+          conda activate mpas_analysis_dev
+          pip check
+          pytest --pyargs mpas_analysis
+          mpas_analysis --help
+          download_analysis_data --help
+
+      - if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
+        name: Build Sphinx Docs
+        run: |
+          conda activate mpas_analysis_dev
+          # sphinx-multiversion expects at least a "main" branch
+          git branch main || echo "branch main already exists."
+          cd docs
+          sphinx-multiversion . 
_build/html
+
diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml
new file mode 100644
index 000000000..d11d3f4b1
--- /dev/null
+++ b/.github/workflows/docs_workflow.yml
@@ -0,0 +1,94 @@
+name: CI/CD Release Workflow
+
+on:
+  push:
+    branches:
+      - main
+      - develop
+
+  release:
+    types: [published]
+
+jobs:
+  publish-docs:
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        shell: bash -l {0}
+    timeout-minutes: 20
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+          fetch-depth: 0
+
+      - name: Cache Conda
+        uses: actions/cache@v4
+        env:
+          # Increase this value to reset cache if dev-spec.txt has not changed in the workflow
+          CACHE_NUMBER: 0
+        with:
+          path: ~/conda_pkgs_dir
+          key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{
+            hashFiles('dev-spec.txt') }}
+
+      - name: Set up Conda Environment
+        uses: conda-incubator/setup-miniconda@v3
+        with:
+          activate-environment: "mpas_analysis_ci"
+          miniforge-version: latest
+          channels: conda-forge
+          channel-priority: strict
+          auto-update-conda: true
+          python-version: "3.12"
+
+      - name: Install mpas_analysis
+        run: |
+          git config --global url."https://github.com/".insteadOf "git@github.com:"
+          conda create -n mpas_analysis_dev --file dev-spec.txt \
+            python=3.12
+          conda activate mpas_analysis_dev
+          python -m pip install -vv --no-deps --no-build-isolation -e .
+
+      - name: Build Sphinx Docs
+        run: |
+          set -e
+          conda activate mpas_analysis_dev
+          pip check
+          mpas_analysis sync diags --help
+          cd docs
+          sphinx-multiversion . 
_build/html + - name: Copy Docs and Commit + run: | + set -e + conda activate mpas_analysis_dev + pip check + mpas_analysis sync diags --help + cd docs + # gh-pages branch must already exist + git clone https://github.com/MPAS-Dev/MPAS-Analysis.git --branch gh-pages --single-branch gh-pages + # Make sure we're in the gh-pages directory. + cd gh-pages + # Create `.nojekyll` (if it doesn't already exist) for proper GH Pages configuration. + touch .nojekyll + # Add `index.html` to point to the `develop` branch automatically. + printf '' > index.html + # Only replace docs in a directory with the destination branch name with latest changes. Docs for + # releases should be untouched. + rm -rf ${{ github.head_ref || github.ref_name }} + # don't clobber existing release versions (in case we retroactively fixed them) + cp -r -n ../_build/html/* . + # Configure git using GitHub Actions credentials. + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + # The second command will fail if no changes were present, so we ignore it + git add . 
+ git commit -m "Update documentation" -a || true + - name: Push Changes + uses: ad-m/github-push-action@master + with: + branch: gh-pages + directory: docs/gh-pages + github_token: ${{ secrets.GITHUB_TOKEN }} + force: true + diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index cd8a76fbc..000000000 --- a/.travis.yml +++ /dev/null @@ -1,48 +0,0 @@ -# Based on https://github.com/NOAA-ORR-ERD/gridded/blob/master/.travis.yml -language: minimal - -sudo: false - -notifications: - email: false - -matrix: - fast_finish: true - include: - - name: "python-3.6" - python: 3.6 - env: CONDA_ENV=py36 - - name: "python-3.7" - python: 3.7 - env: CONDA_ENV=py37 - - name: "python-3.8" - python: 3.8 - env: CONDA_ENV=py38 - - name: "python-3.7-xarray-main" - python: 3.7 - env: CONDA_ENV=py37-xarray-main - -env: - global: - secure: "RcARu+7YXJCWMDkwaP151JUuJW0aei0DG8ES6xwjlPjCy0wht1aDyZlcS4aE+hoQOZL/dGM4ppKLoxy7PlTMg3bocn4782VbnGT1p94FieuNVj+irs54UrTBouKbDmJQtgGPNV8WnXt3suKlcb62304eJs5Ryfl2ZOIpS+yBFcfUgTFn3wBGba4WO+wzx2mG+e5E6CIOLkoFlLYaJJ+2vShXHaCNYIgq4DrLYR1U/Jq6HAli6x3iETPqL0ZPdLAtB96lYYssV+4VZjyMzAkxNjj3RhRAIFH8K/Fe9VWke1MZqwnz0Bu2Z8GyhwFkdc8u+epUDCUIgGxGps57RgiBi2dmLW0RyNdQ2pG5WgT7M5/dkx4STq2ofv/YAKniG7LyM30X/H7G9/RlyIQ354P54265zdul1sasREeHKhNe82QsX2tos/jo7/E2f0uYtrQ1btSo1jItyTukOHER11W6yqYTSc4yqFJLCXxWFbEuzSyaIfVp7AJFHGjYU3A/ZdMG6Y+cs9Q/xapwx2Zoon67GGDJ7OItQeScKcF7pIYbo1uWWYHdQ9GBedyqWgqz75H6wCXbxo2jGUze/pP5I/2plBMeDiY4eSBulbbpupRXpj1uNOAZv+PiP1UDjwMQCzz/NwBLCcG+jxEDwOHnzoPwPnZeOBNPHXkEAZxeW9z9TjA=" - -before_install: - - | - wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh - bash miniconda.sh -b -p $HOME/miniconda - source $HOME/miniconda/etc/profile.d/conda.sh - conda activate base - conda config --set always_yes yes --set changeps1 no --set show_channel_urls true - conda update conda - conda config --add channels conda-forge --force - conda config --set channel_priority strict - conda env create --file 
ci/environment-$CONDA_ENV.yml - source activate TEST - # conda info --all - # conda list - -install: - - ./ci/install.bash - -script: - - ./ci/test_and_publish_docs.bash \ No newline at end of file diff --git a/README.md b/README.md index 90dd17edf..7c7ce60de 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,4 @@ # MPAS-Analysis -[![Build Status](https://dev.azure.com/MPAS-Dev/MPAS-Analysis%20testing/_apis/build/status/MPAS-Dev.MPAS-Analysis?branchName=develop)](https://dev.azure.com/MPAS-Dev/MPAS-Analysis%20testing/_build/latest?definitionId=2&branchName=develop) Analysis for simulations produced with Model for Prediction Across Scales (MPAS) components and the Energy Exascale Earth System Model (E3SM), which @@ -28,7 +27,7 @@ used those components. ## Documentation -[https://mpas-dev.github.io/MPAS-Analysis/latest/](https://mpas-dev.github.io/MPAS-Analysis/latest/) +[https://mpas-dev.github.io/MPAS-Analysis/develop/](https://mpas-dev.github.io/MPAS-Analysis/develop/) ## Installation for users @@ -57,7 +56,7 @@ conda config --add channels conda-forge conda config --set channel_priority strict conda create -y -n mpas_dev --file dev-spec.txt conda activate mpas_dev -python -m pip install -e . +python -m pip install --no-deps --no-build-isolation -e . ``` If you are developing another conda package at the same time (this is common @@ -69,9 +68,9 @@ conda create -y -n mpas_dev --file tools/MPAS-Tools/conda_package/dev-spec.txt \ --file analysis/MPAS-Analysis/dev-spec.txt conda activate mpas_dev cd tools/MPAS-Tools/conda_package -python -m pip install -e . +python -m pip install --no-deps --no-build-isolation -e . cd ../../../analysis/MPAS-Analysis -python -m pip install -e . +python -m pip install --no-deps --no-build-isolation -e . ``` Obviously, the paths to the repos may be different in your local clones. With the `mpas_dev` environment as defined above, you can make changes to both @@ -290,7 +289,7 @@ to be generated and is set up properly. 
## Generating Documentation -Create a development environment as described above in "Installation for +Create a development environment as described above in "Installation for developers". Then run: To generate the `sphinx` documentation, run: ``` @@ -301,4 +300,4 @@ make html The results can be viewed in your web browser by opening: ``` _build/html/index.html -``` \ No newline at end of file +``` diff --git a/azure-pipelines.yml b/azure-pipelines.yml deleted file mode 100644 index 38f423a2b..000000000 --- a/azure-pipelines.yml +++ /dev/null @@ -1,190 +0,0 @@ - -trigger: - branches: - include: - - main - - develop - tags: - include: - - '*' - -pr: - branches: - include: - - main - - develop - -jobs: -- job: - displayName: linux - pool: - vmImage: 'ubuntu-latest' - strategy: - matrix: - Python310: - python.version: '3.10' - Python311: - python.version: '3.11' - Python312: - python.version: '3.12' - - steps: - - bash: echo "##vso[task.prependpath]$CONDA/bin" - displayName: Add conda to PATH - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda config --add channels conda-forge - conda config --set channel_priority strict - conda create --yes --name build_env conda-build - displayName: Create build environment - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda activate build_env - conda build -m "ci/python${PYTHON_VERSION}.yaml" "ci/recipe" - displayName: Build MPAS-Analysis - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda activate - conda create --yes --quiet --name mpas \ - -c ${CONDA_PREFIX}/envs/build_env/conda-bld/ \ - python=$PYTHON_VERSION mpas-analysis pytest - displayName: Create Anaconda mpas environment - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda activate mpas - pytest --pyargs mpas_analysis - displayName: pytest - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda activate - conda create --yes --quiet --name docs \ - -c ${CONDA_PREFIX}/envs/build_env/conda-bld/ \ - 
python=$PYTHON_VERSION mpas-analysis sphinx mock sphinx_rtd_theme \ - tabulate "m2r2>=0.3.3" "mistune<2" - condition: eq(variables['python.version'], '3.10') - displayName: Create Anaconda docs environment - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda activate docs - - echo "source branch: $(Build.SourceBranch)" - echo "repository: $(Build.Repository.Name)" - - tag=$(git describe --tags $(git rev-list --tags --max-count=1)) - echo "tag: $tag" - - version=`python -c "import mpas_analysis; print(mpas_analysis.__version__)"` - echo "version: $version" - - REPO_PATH=$PWD - - if [[ "$(Build.SourceBranch)" == "refs/heads/develop" ]]; then - export DOCS_VERSION="latest" - deploy=True - elif [[ "$(Build.SourceBranch)" == "refs/heads/main" ]]; then - export DOCS_VERSION="stable" - deploy=True - elif [[ "$(Build.SourceBranch)" == refs/tags/* ]]; then - # this is a tag build - export DOCS_VERSION="$tag" - deploy=True - else - DOCS_VERSION="$version" - export DOCS_VERSION - deploy=False - fi - echo "Docs version: $DOCS_VERSION" - echo "Deploy to gh-pages? $deploy" - cd docs || exit 1 - make html - - cd "$REPO_PATH" || exit 1 - - if [[ "$deploy" == "True" ]]; then - PUBLICATION_BRANCH=gh-pages - # Checkout the branch - pushd $HOME || exit 1 - git clone --branch=$PUBLICATION_BRANCH https://$(GitHubToken)@github.com/$(Build.Repository.Name) publish - cd publish || exit 1 - - # Update pages - if [[ -d "$DOCS_VERSION" ]]; then - git rm -rf "$DOCS_VERSION" > /dev/null - fi - mkdir "$DOCS_VERSION" - cp -r "$REPO_PATH"/docs/_build/html/* "$DOCS_VERSION" - # Commit and push latest version - git add . - if git diff-index --quiet HEAD; then - echo "No changes in the docs." 
- else - git config --local user.name "Azure Pipelines" - git config --local user.email "azuredevops@microsoft.com" - git commit -m "[skip ci] Update $DOCS_VERSION" - git push -fq origin $PUBLICATION_BRANCH - fi - popd || exit 1 - fi - condition: eq(variables['python.version'], '3.10') - displayName: build and deploy docs - -- job: - displayName: xarray-main - pool: - vmImage: 'ubuntu-latest' - strategy: - matrix: - Python310: - python.version: '3.11' - - steps: - - bash: echo "##vso[task.prependpath]$CONDA/bin" - displayName: Add conda to PATH - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda config --add channels conda-forge - conda config --set channel_priority strict - conda create --yes --name build_env conda-build - displayName: Create build environment - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda activate build_env - # workaround based on recent failures - rm /usr/share/miniconda/pkgs/cache/*.json - conda build -m "ci/python${PYTHON_VERSION}.yaml" "ci/recipe" - displayName: Build MPAS-Analysis - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda activate - conda create --yes --quiet --name mpas \ - -c ${CONDA_PREFIX}/envs/build_env/conda-bld/ \ - python=$PYTHON_VERSION mpas-analysis pytest - conda activate mpas - pip install git+https://github.com/pydata/xarray.git - - displayName: Create Anaconda mpas environment - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda activate mpas - pytest --pyargs mpas_analysis - displayName: pytest diff --git a/ci/recipe/meta.yaml b/ci/recipe/meta.yaml index ce5acacc6..8a6ab9dad 100644 --- a/ci/recipe/meta.yaml +++ b/ci/recipe/meta.yaml @@ -1,5 +1,5 @@ {% set name = "MPAS-Analysis" %} -{% set version = "1.11.0rc1" %} +{% set version = "1.12.0" %} package: name: {{ name|lower }} @@ -10,7 +10,7 @@ source: build: number: 0 - script: {{ PYTHON }} -m pip install . --no-deps -vv + script: {{ PYTHON }} -m pip install --no-deps --no-build-isolation -vv . 
noarch: python entry_points: - mpas_analysis = mpas_analysis.__main__:main @@ -18,11 +18,10 @@ build: requirements: host: - - python >=3.8 + - python >=3.9 - pip run: - - python >=3.8 - - bottleneck + - python >=3.9 - cartopy >=0.18.0 - cartopy_offlinedata - cmocean @@ -30,15 +29,15 @@ requirements: - esmf >=8.4.2,<8.7.0 - esmf=*=mpi_mpich_* - f90nml - - geometric_features >=1.2.0 + - geometric_features >=1.4.0 - gsw - lxml - mache >=1.11.0 - - matplotlib-base >=3.6.0,!=3.7.2 - - mpas_tools >=0.30.0 - - nco >=4.8.1 + - matplotlib-base >=3.9.0 + - mpas_tools >=0.34.0 + - nco >=4.8.1,!=5.2.6 - netcdf4 - - numpy + - numpy >=2.0,<3.0 - pandas - pillow >=10.0.0,<11.0.0 - progressbar2 diff --git a/dev-spec.txt b/dev-spec.txt index 4beb253ca..b8e2c43f1 100644 --- a/dev-spec.txt +++ b/dev-spec.txt @@ -2,8 +2,7 @@ # $ conda create --name --file # Base -python>=3.8 -bottleneck +python>=3.9 cartopy >=0.18.0 cartopy_offlinedata cmocean @@ -11,17 +10,15 @@ dask esmf >=8.4.2,<8.7.0 esmf=*=mpi_mpich_* f90nml -geometric_features>=1.2.0 +geometric_features>=1.4.0 gsw lxml mache >=1.11.0 -# 3.7.2 contains a bug with tight layouts and insets -# https://github.com/matplotlib/matplotlib/pull/26291 -matplotlib-base>=3.6.0,!=3.7.2 -mpas_tools>=0.30.0 -nco>=4.8.1 +matplotlib-base>=3.9.0 +mpas_tools>=0.34.1 +nco>=4.8.1,!=5.2.6 netcdf4 -numpy +numpy>=2.0,<3.0 pandas pillow >=10.0.0,<11.0.0 progressbar2 @@ -46,4 +43,5 @@ m2r2>=0.3.3 mistune<2 sphinx sphinx_rtd_theme +sphinx-multiversion tabulate diff --git a/docs/_static/style.css b/docs/_static/style.css new file mode 100644 index 000000000..6cbfde333 --- /dev/null +++ b/docs/_static/style.css @@ -0,0 +1,4 @@ +.wy-nav-content { + max-width: 1200px !important; +} + diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html new file mode 100644 index 000000000..efc29758f --- /dev/null +++ b/docs/_templates/layout.html @@ -0,0 +1,5 @@ +{% extends "!layout.html" %} +{% block extrahead %} + +{% endblock %} + diff --git 
a/docs/_templates/versions.html b/docs/_templates/versions.html new file mode 100644 index 000000000..625a9a384 --- /dev/null +++ b/docs/_templates/versions.html @@ -0,0 +1,28 @@ +{%- if current_version %} +
+ + Other Versions + v: {{ current_version.name }} + + +
+ {%- if versions.tags %} +
+
Tags
+ {%- for item in versions.tags %} +
{{ item.name }}
+ {%- endfor %} +
+ {%- endif %} + {%- if versions.branches %} +
+
Branches
+ {%- for item in versions.branches %} +
{{ item.name }}
+ {%- endfor %} +
+ {%- endif %} +
+
+{%- endif %} + diff --git a/docs/conf.py b/docs/conf.py index 5f79cacc2..b72c49426 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -35,12 +35,16 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ['sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.intersphinx', - 'sphinx.ext.mathjax', - 'sphinx.ext.viewcode', - 'sphinx.ext.napoleon'] +extensions = [ + 'sphinx_rtd_theme', + 'sphinx_multiversion', + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.mathjax', + 'sphinx.ext.viewcode', + 'sphinx.ext.napoleon' +] autosummary_generate = True @@ -113,14 +117,7 @@ # a list of builtin themes. # -# on_rtd is whether we are on readthedocs.org, this line of code grabbed from -# docs.readthedocs.org -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' - -if not on_rtd: # only import and set the theme if we're building docs locally - import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] +html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -128,12 +125,6 @@ # # html_theme_options = {} -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = ['_static'] - - # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. @@ -225,3 +216,20 @@ outFile.write(output) github_doc_root = 'https://github.com/rtfd/recommonmark/tree/master/doc/' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. 
They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +html_sidebars = { + "**": [ + "versions.html", + ], +} + +# -- Options sphinx-multiversion ------------------------------------------- +# Include tags like "tags/1.0.0" -- 1.7.2 doesn't build +smv_tag_whitelist = r'^(?!1.7.2)\d+\.\d+.\d+$' +smv_branch_whitelist = r'^(develop|main)$' +smv_remote_whitelist = 'origin' diff --git a/docs/developers_guide/api.rst b/docs/developers_guide/api.rst index b619d9016..58665f168 100644 --- a/docs/developers_guide/api.rst +++ b/docs/developers_guide/api.rst @@ -68,9 +68,11 @@ Ocean tasks ClimatologyMapMLDMinMax ClimatologyMapSSH ClimatologyMapEKE + ClimatologyMapFluxes ClimatologyMapOHCAnomaly ClimatologyMapAntarcticMelt ClimatologyMapSose + ClimatologyMapVel ClimatologyMapArgoTemperature ClimatologyMapArgoSalinity ClimatologyMapWaves diff --git a/docs/index.rst b/docs/index.rst index 4f72191c6..8f65f7b00 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -46,9 +46,3 @@ used those components. :maxdepth: 1 authors - -.. toctree:: - :caption: Versions - :maxdepth: 1 - - versions diff --git a/docs/tutorials/dev_add_task.rst b/docs/tutorials/dev_add_task.rst index 8cb0c8e5c..31135282f 100644 --- a/docs/tutorials/dev_add_task.rst +++ b/docs/tutorials/dev_add_task.rst @@ -526,7 +526,7 @@ I'll create or recreate my ``mpas_dev`` environment as in .. code-block:: bash conda activate mpas_dev - python -m pip install -e . + python -m pip install --no-deps --no-build-isolation -e . 4.1 ``ClimatologyMapBSF`` class ------------------------------- diff --git a/docs/tutorials/dev_getting_started.rst b/docs/tutorials/dev_getting_started.rst index abee8f44c..69cfcae26 100644 --- a/docs/tutorials/dev_getting_started.rst +++ b/docs/tutorials/dev_getting_started.rst @@ -268,7 +268,7 @@ mode by running: .. code-block:: bash $ conda activate mpas_dev - $ python -m pip install -e . 
+ $ python -m pip install --no-deps --no-build-isolation -e . In this mode, any edits you make to the code in the worktree will be available in the conda environment. If you run ``mpas_analysis`` on the command line, @@ -281,7 +281,7 @@ it will know about the changes. .. code-block:: bash - python -m pip install -e . + python -m pip install --no-deps --no-build-isolation -e . .. _tutorial_dev_get_started_activ_env: @@ -317,7 +317,7 @@ You can just reinstall ``mpas_analysis`` itself by rerunning .. code-block:: bash - python -m pip install -e . + python -m pip install --no-deps --no-build-isolation -e . in the new worktree. If you forget this step, you will find that changes you make in the worktree don't affect the ``mpas_dev`` conda environment you are diff --git a/docs/tutorials/getting_started.rst b/docs/tutorials/getting_started.rst index 1a0036a11..79db487c1 100644 --- a/docs/tutorials/getting_started.rst +++ b/docs/tutorials/getting_started.rst @@ -77,7 +77,7 @@ install the latest version of the ``mpas-analysis`` package into it: .. code-block:: bash - $ conda create -n mpas-analysis python=3.8 mpas-analysis + $ conda create -n mpas-analysis python=3.12 mpas-analysis 1.3 Activating the environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/users_guide/analysis_tasks.rst b/docs/users_guide/analysis_tasks.rst index 313adda83..60e1bfb2a 100644 --- a/docs/users_guide/analysis_tasks.rst +++ b/docs/users_guide/analysis_tasks.rst @@ -4,6 +4,8 @@ Analysis Tasks .. 
toctree:: :maxdepth: 1 + tasks/climatologyMapMassFluxes + tasks/climatologyMapHeatFluxes tasks/climatologyMapMLD tasks/climatologyMapMLDMinMax tasks/climatologyMapSST @@ -12,6 +14,7 @@ Analysis Tasks tasks/climatologyMapEKE tasks/climatologyMapOHCAnomaly tasks/climatologyMapSose + tasks/climatologyMapVel tasks/climatologyMapBGC tasks/climatologyMapArgoTemperature tasks/climatologyMapArgoSalinity diff --git a/docs/users_guide/config/regions.rst b/docs/users_guide/config/regions.rst index fb83a7c7a..4303f26a5 100644 --- a/docs/users_guide/config/regions.rst +++ b/docs/users_guide/config/regions.rst @@ -46,7 +46,7 @@ Several tasks (:ref:`task_hovmollerOceanRegions`, :ref:`task_oceanHistogram`, :ref:`task_oceanRegionalProfiles`, :ref:`task_regionalTSDiagrams`, and :ref:`task_timeSeriesOceanRegions`) can use any of the defined region groups. Currently, available region groups are: ``Artic Ocean Regions``, ``Antarctic Regions``, -``Ocean Basins``, ``Ice Shelves``, and ``Ocean Subbasins``. +``Greenland Regions``, ``Ocean Basins``, ``Ice Shelves``, and ``Ocean Subbasins``. The option ``regionMaskSubdirectory`` in the ``[diagnostics]`` section specifies the path to cached mask files for these region groups, typically diff --git a/docs/users_guide/tasks/climatologyMapHeatFluxes.rst b/docs/users_guide/tasks/climatologyMapHeatFluxes.rst new file mode 100644 index 000000000..8d1a03ce2 --- /dev/null +++ b/docs/users_guide/tasks/climatologyMapHeatFluxes.rst @@ -0,0 +1,8 @@ +.. _task_climatologyMapHeatFluxes: + +climatologyMapHeatFluxes +======================== + +See :ref:`task_climatologyMapMassFluxes`. This task functions identically +except a different config section is used and different variables are +appropriate. diff --git a/docs/users_guide/tasks/climatologyMapMassFluxes.rst b/docs/users_guide/tasks/climatologyMapMassFluxes.rst new file mode 100644 index 000000000..0401154ee --- /dev/null +++ b/docs/users_guide/tasks/climatologyMapMassFluxes.rst @@ -0,0 +1,74 @@ +.. 
_task_climatologyMapMassFluxes: + +climatologyMapMassFluxes +======================== + +An analysis task for comparison of global maps of surface mass fluxes. +No observational products are currently available. + +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, fluxes, mass + +Configuration Options +--------------------- + +The following configuration options are available for climatologyMapMassFluxes +and similarly for climatologyMapHeatFluxes:: + + [climatologyMapMassFluxes] + + # colormap for model + colormapNameResult = balance + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # color indices into colormapName for filled contours + colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255] + # colormap levels/values for contour boundaries + colorbarLevelsResult = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] + # the type of norm used in the colormap + normTypeResult = symLog + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -1e-3, 'vmax': 1e-3, 'linthresh': 1e-6} + + # colormap for differences + colormapNameDifference = balance + # whether the colormap is indexed or continuous + colormapTypeDifference = continuous + # color indices into colormapName for filled contours + colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] + # colormap levels/values for contour boundaries + colorbarLevelsDifference = [-5, -3, -2, -1, -0.1, 0, 0.1, 1, 2, 3, 5] + # the type of norm used in the colormap + normTypeDifference = symLog + # A dictionary with keywords for the norm + normArgsDifference = {'vmin': -1e-3, 'vmax': 1e-3, 'linthresh': 1e-6} + + variables = ['riverRunoffFlux', 'iceRunoffFlux', 'snowFlux', 'rainFlux', 'evaporationFlux', 'seaIceFreshWaterFlux', 'landIceFreshwaterFlux'] + + seasons = ['JFM', 'JAS', 'ANN'] + + # comparison grid(s) on which to plot analysis + comparisonGrids = ['latlon', 'arctic', 'antarctic'] + +By default, a "preindustrial" 
climatology is computed for comparison with the +model results. For simulations covering a different time period, the range of +years (``obsStartYear`` and ``obsEndYear``) should be updated. + +For details on the remaining configuration options, see: + * :ref:`config_colormaps` + * :ref:`config_seasons` + * :ref:`config_comparison_grids` + +Observations +------------ + +None available. + +Example Result +-------------- + +.. image:: examples/seaIceFreshWaterFlux.png + :width: 500 px + :align: center diff --git a/docs/users_guide/tasks/climatologyMapVel.rst b/docs/users_guide/tasks/climatologyMapVel.rst new file mode 100644 index 000000000..cc9c9828d --- /dev/null +++ b/docs/users_guide/tasks/climatologyMapVel.rst @@ -0,0 +1,99 @@ +.. _task_climatologyMapVel: + +climatologyMapVel +================= + +An analysis task for plotting velocity climatologies at various depths. +Comparison against observations is not yet supported. + +Component and Tags:: + + component: ocean + tags: climatology, horizontalMap, climatologyMapVel + +Configuration Options +--------------------- + +The following configuration options are available for this task:: + + [climatologyMapVel] + ## options related to plotting climatology maps of Antarctic fields at + ## various levels, including the sea floor against control model results + ## and SOSE reanalysis data + + # comparison grid(s) + # only the Antarctic really makes sense but lat-lon could technically work. + comparisonGrids = ['latlon'] + + # Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, + # Oct, Nov, Dec, JFM, AMJ, JAS, OND, ANN) + seasons = ['ANN'] + + # list of depths in meters (positive up) at which to analyze, 'top' for the + # sea surface, 'bot' for the sea floor + depths = ['top', -200, -400, -600, -800, -1000, -1500, -2000, 'bot'] + + # a list of fields top plot for each transect. 
All supported fields are + # listed below + fieldList = ['zonalVelocity', 'meridionalVelocity', 'velocityMagnitude'] + + # set the suffix for files, e.g. if you want to use a different comparison + # grid from the default + fileSuffix = latlon + + # depth separating shallow from deep color maps + shallowVsDeepColormapDepth = -200 + + # colormap for model/observations + colormapNameResult = delta + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = linear + # A dictionary with keywords for the norm + normArgsResult = {'vmin': -0.2, 'vmax': 0.2} + + + [climatologyMapVel_VelocityMagnitudeShallow] + + # colormap for model/observations + colormapNameResult = ice + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = log + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 1.e-3, 'vmax': 1.} + + + [climatologyMapVel_VelocityMagnitudeDeep] + + # colormap for model/observations + colormapNameResult = ice + # whether the colormap is indexed or continuous + colormapTypeResult = continuous + # the type of norm used in the colormap + normTypeResult = log + # A dictionary with keywords for the norm + normArgsResult = {'vmin': 1.e-4, 'vmax': 5.e-1} + +There is a section for options that apply to all velocity climatology maps and +up to two for each field for specifying the color maps at shallow and deep +depths. + +The option ``depths`` is a list of (approximate) depths at which to sample +the potential temperature field. A value of ``'top'`` indicates the sea +surface (or the ice-ocean interface under ice shelves) while a value of +``'bot'`` indicates the seafloor. + +The user can select only to plot a subset of the supported fields by adding +only the desired field names to ``fieldList``. The default value shows the +list of all available fields. 
+ + +Example Result +-------------- + +.. image:: examples/clim_vel_latlon.png + :width: 720 px + :align: center diff --git a/docs/users_guide/tasks/examples/clim_vel_latlon.png b/docs/users_guide/tasks/examples/clim_vel_latlon.png new file mode 100644 index 000000000..e650e350a Binary files /dev/null and b/docs/users_guide/tasks/examples/clim_vel_latlon.png differ diff --git a/docs/users_guide/tasks/examples/seaIceFreshWaterFlux.png b/docs/users_guide/tasks/examples/seaIceFreshWaterFlux.png new file mode 100644 index 000000000..dd56b304a Binary files /dev/null and b/docs/users_guide/tasks/examples/seaIceFreshWaterFlux.png differ diff --git a/docs/users_guide/tasks/regionalTSDiagrams.rst b/docs/users_guide/tasks/regionalTSDiagrams.rst index 6629cd9ff..c2d240c92 100644 --- a/docs/users_guide/tasks/regionalTSDiagrams.rst +++ b/docs/users_guide/tasks/regionalTSDiagrams.rst @@ -121,6 +121,8 @@ The following configuration options are available for this task: # Obserational data sets to compare against obs = ['WOA18'] +Similar config sections are included for other region groups. + Region Groups ------------- diff --git a/docs/users_guide/tasks/timeSeriesTransport.rst b/docs/users_guide/tasks/timeSeriesTransport.rst index 044ba77dd..ebb747cb5 100644 --- a/docs/users_guide/tasks/timeSeriesTransport.rst +++ b/docs/users_guide/tasks/timeSeriesTransport.rst @@ -23,8 +23,7 @@ The following configuration options are available for this task:: # available transects. transectsToPlot = ['Drake Passage', 'Tasmania-Ant', 'Africa-Ant', 'Antilles Inflow', 'Mona Passage', 'Windward Passage', 'Florida-Cuba', 'Florida-Bahamas', - 'Indonesian Throughflow', 'Agulhas', 'Mozambique Channel', 'Bering Strait', - 'Lancaster Sound', 'Fram Strait', 'Nares Strait'] + 'Indonesian Throughflow', 'Agulhas', 'Mozambique Channel'] # Number of months over which to compute moving average movingAverageMonths = 1 @@ -54,8 +53,9 @@ defined in the ``transportTransects`` transect group. 
These are:: "Japan blockage", "Lancaster Sound", "Mona Passage", "Mozambique Channel", "Nares Strait", "Nares Strait Deepen", "Persian Gulf Deepen", "Red Sea Deepen", "Sakhalin blockage", "Strait of Gibralter Deepen 1", - "Strait of Gibralter Deepen 2", "Tasmania-Ant", "White Sea", - "Windward Passage"] + "Hudson Bay-Labrador Sea", "OSNAP section East", "OSNAP section West", + "Strait of Gibralter Deepen 2", "Tasmania-Ant", "White Sea", + "Windward Passage"] Many of these are likely not of interest in most simulations, so a subset of the most relevant transects has been chosen in the default configuration. diff --git a/docs/versions.rst b/docs/versions.rst deleted file mode 100644 index db8c9aec1..000000000 --- a/docs/versions.rst +++ /dev/null @@ -1,62 +0,0 @@ -Versions -======== - -================ =============== -Documentation On GitHub -================ =============== -`stable`_ `main`_ -`latest`_ `develop`_ -`v1.2.6`_ `1.2.6`_ -`v1.2.7`_ `1.2.7`_ -`v1.2.8`_ `1.2.8`_ -`v1.2.9`_ `1.2.9`_ -`v1.3.0`_ `1.3.0`_ -`v1.4.0`_ `1.4.0`_ -`v1.5.0`_ `1.5.0`_ -`v1.6.0`_ `1.6.0`_ -`v1.6.1`_ `1.6.1`_ -`v1.7.0`_ `1.7.0`_ -`v1.7.1`_ `1.7.1`_ -`v1.7.2`_ `1.7.2`_ -`v1.8.0`_ `1.8.0`_ -`v1.9.0`_ `1.9.0`_ -`v1.10.0`_ `1.10.0`_ -`v1.11.0`_ `1.11.0`_ -================ =============== - -.. _`stable`: ../stable/index.html -.. _`latest`: ../latest/index.html -.. _`v1.2.6`: ../1.2.6/index.html -.. _`v1.2.7`: ../1.2.7/index.html -.. _`v1.2.8`: ../1.2.8/index.html -.. _`v1.2.9`: ../1.2.9/index.html -.. _`v1.3.0`: ../1.3.0/index.html -.. _`v1.4.0`: ../1.4.0/index.html -.. _`v1.5.0`: ../1.5.0/index.html -.. _`v1.6.0`: ../1.6.0/index.html -.. _`v1.6.1`: ../1.6.1/index.html -.. _`v1.7.0`: ../1.7.0/index.html -.. _`v1.7.1`: ../1.7.1/index.html -.. _`v1.7.2`: ../1.7.2/index.html -.. _`v1.8.0`: ../1.8.0/index.html -.. _`v1.9.0`: ../1.9.0/index.html -.. _`v1.10.0`: ../1.10.0/index.html -.. _`v1.11.0`: ../1.11.0/index.html -.. _`main`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/main -.. 
_`develop`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/develop -.. _`1.2.6`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.6 -.. _`1.2.7`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.7 -.. _`1.2.8`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.8 -.. _`1.2.9`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.2.9 -.. _`1.3.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.3.0 -.. _`1.4.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.4.0 -.. _`1.5.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.5.0 -.. _`1.6.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.6.0 -.. _`1.6.1`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.6.1 -.. _`1.7.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.7.0 -.. _`1.7.1`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.7.1 -.. _`1.7.2`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.7.2 -.. _`1.8.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.8.0 -.. _`1.9.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.9.0 -.. _`1.10.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.10.0 -.. 
_`1.11.0`: https://github.com/MPAS-Dev/MPAS-Analysis/tree/1.11.0 diff --git a/mpas_analysis/__init__.py b/mpas_analysis/__init__.py index 672aa1d48..9f67a0390 100644 --- a/mpas_analysis/__init__.py +++ b/mpas_analysis/__init__.py @@ -3,5 +3,5 @@ import matplotlib as mpl mpl.use('Agg') -__version_info__ = (1, 11, 0) +__version_info__ = (1, 12, 0) __version__ = '.'.join(str(vi) for vi in __version_info__) diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py index f1f90e148..bc6f62120 100755 --- a/mpas_analysis/__main__.py +++ b/mpas_analysis/__main__.py @@ -136,6 +136,14 @@ def build_analysis_list(config, controlConfig): analyses.append(ocean.ClimatologyMapSST(config, oceanClimatologyTasks['avg'], controlConfig)) + analyses.append(ocean.ClimatologyMapFluxes(config, + oceanClimatologyTasks['avg'], + controlConfig, + fluxType='mass')) + analyses.append(ocean.ClimatologyMapFluxes(config, + oceanClimatologyTasks['avg'], + controlConfig, + fluxType='heat')) analyses.append(ocean.ClimatologyMapSSS(config, oceanClimatologyTasks['avg'], controlConfig)) @@ -145,6 +153,9 @@ def build_analysis_list(config, controlConfig): analyses.append(ocean.ClimatologyMapEKE(config, oceanClimatologyTasks['avg'], controlConfig)) + analyses.append(ocean.ClimatologyMapVel(config, + oceanClimatologyTasks['avg'], + controlConfig)) analyses.append(ocean.ClimatologyMapBSF(config, oceanClimatologyTasks['avg'], controlConfig)) @@ -433,11 +444,14 @@ def add_task_and_subtasks(analysisTask, analysesToGenerate, verbose, verbose, callCheckGenerate=False) totalFailures += failureCount if prereq._setupStatus != 'success': - assert(failureCount > 0) + if failureCount == 0: + raise ValueError(f'Error: prerequisite {prereq.printTaskName} of ' + f'{taskTitle} did not set up successfully but also ' + 'did not indicate a failure. 
This likely indicates ' + 'a bug like multiple tasks with the same name.') # a prereq failed setup_and_check - print("Warning: prerequisite of {} failed during check, " - "so this task will not be run".format( - taskTitle)) + print(f'Warning: prerequisite of {taskTitle} failed during check, ' + 'so this task will not be run') analysisTask._setupStatus = 'fail' totalFailures += 1 return totalFailures @@ -463,11 +477,14 @@ def add_task_and_subtasks(analysisTask, analysesToGenerate, verbose, verbose, callCheckGenerate=False) totalFailures += failureCount if subtask._setupStatus != 'success': - assert(failureCount > 0) + if failureCount == 0: + raise ValueError(f'Error: subtask {subtask.printTaskName} of ' + f'{taskTitle} did not set up successfully but also ' + 'did not indicate a failure. This likely indicates ' + 'a bug like multiple tasks with the same name.') # a subtask failed setup_and_check - print("Warning: subtask of {} failed during check, " - "so this task will not be run".format( - taskTitle)) + print(f'Warning: subtask of {taskTitle} failed during check, ' + 'so this task will not be run') analysisTask._setupStatus = 'fail' totalFailures += 1 return totalFailures diff --git a/mpas_analysis/default.cfg b/mpas_analysis/default.cfg index fdfa55fc3..b453faec9 100755 --- a/mpas_analysis/default.cfg +++ b/mpas_analysis/default.cfg @@ -331,6 +331,10 @@ fitColor1 = tab:blue # land_ice_mass_flux_components : Mass fluxes from land ice plotTypes = ['absolute_energy_error', 'absolute_salt_error', 'total_mass_change'] +# Number of points over which to compute moving average(e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 365 [index] ## options related to producing nino index. @@ -1073,6 +1077,78 @@ colorbarLevelsDifference = [-5, -3, -2, -1, -0.1, 0, 0.1, 1, 2, 3, 5] contourLevelsDifference = np.arange(-5., 6., 2.) 
+[climatologyMapHeatFluxes] + +# colormap for model +colormapNameResult = balance +# whether the colormap is indexed or continuous +colormapTypeResult = continuous +# color indices into colormapName for filled contours +colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +colorbarLevelsResult = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] +# the type of norm used in the colormap +normTypeResult = symLog +# A dictionary with keywords for the norm +normArgsResult = {'vmin': -5e2, 'vmax': 5e2, 'linthresh': 1e-2} + +# colormap for differences +colormapNameDifference = balance +# whether the colormap is indexed or continuous +colormapTypeDifference = continuous +# color indices into colormapName for filled contours +colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] +# colormap levels/values for contour boundaries +colorbarLevelsDifference = [-5, -3, -2, -1, -0.1, 0, 0.1, 1, 2, 3, 5] +# the type of norm used in the colormap +normTypeDifference = symLog +# A dictionary with keywords for the norm +normArgsDifference = {'vmin': -5e2, 'vmax': 5e2, 'linthresh': 1e-2} + +variables = ['totalFreshWaterTemperatureFlux', 'latentHeatFlux', 'sensibleHeatFlux', 'longWaveHeatFluxUp', 'longWaveHeatFluxDown', 'seaIceHeatFlux', 'shortWaveHeatFlux'] + +seasons = ['JFM', 'JAS', 'ANN'] + +# comparison grid(s) on which to plot analysis +comparisonGrids = ['latlon', 'arctic', 'antarctic'] + + +[climatologyMapMassFluxes] + +# colormap for model +colormapNameResult = balance +# whether the colormap is indexed or continuous +colormapTypeResult = continuous +# color indices into colormapName for filled contours +colormapIndicesResult = [0, 40, 80, 110, 140, 170, 200, 230, 255] +# colormap levels/values for contour boundaries +colorbarLevelsResult = [-2, 0, 2, 6, 10, 16, 22, 26, 28, 32] +# the type of norm used in the colormap +normTypeResult = symLog +# A dictionary with keywords for the norm +normArgsResult 
= {'vmin': -1e-3, 'vmax': 1e-3, 'linthresh': 1e-6} + +# colormap for differences +colormapNameDifference = balance +# whether the colormap is indexed or continuous +colormapTypeDifference = continuous +# color indices into colormapName for filled contours +colormapIndicesDifference = [0, 28, 57, 85, 113, 128, 128, 142, 170, 198, 227, 255] +# colormap levels/values for contour boundaries +colorbarLevelsDifference = [-5, -3, -2, -1, -0.1, 0, 0.1, 1, 2, 3, 5] +# the type of norm used in the colormap +normTypeDifference = symLog +# A dictionary with keywords for the norm +normArgsDifference = {'vmin': -1e-3, 'vmax': 1e-3, 'linthresh': 1e-6} + +variables = ['riverRunoffFlux', 'iceRunoffFlux', 'snowFlux', 'rainFlux', 'evaporationFlux', 'seaIceFreshWaterFlux'] + +seasons = ['JFM', 'JAS', 'ANN'] + +# comparison grid(s) on which to plot analysis +comparisonGrids = ['latlon', 'arctic', 'antarctic'] + + [climatologyMapSST] ## options related to plotting horizontally remapped climatologies of ## sea surface temperature (SST) against control model results and @@ -1687,16 +1763,36 @@ obs = ['SOSE', 'WOA18'] [timeSeriesTransport] ## options related to plotting time series of transport through transects +transportGroups = ['Transport Transects'] +[timeSeriesTransportTransects] # list of ocean transects from geometric_features to plot or ['all'] for all # available transects. transectsToPlot = ['Drake Passage', 'Tasmania-Ant', 'Africa-Ant', 'Antilles Inflow', 'Mona Passage', 'Windward Passage', 'Florida-Cuba', 'Florida-Bahamas', 'Indonesian Throughflow', - 'Agulhas', 'Mozambique Channel', 'Bering Strait', - 'Lancaster Sound', 'Fram Strait', 'Nares Strait', - 'Denmark Strait', 'Iceland-Faroe-Scotland'] + 'Agulhas', 'Mozambique Channel'] + +# Number of months over which to compute moving average +movingAveragePoints = 1 + +# An optional first year for the tick marks on the x axis. Leave commented out +# to start at the beginning of the time series. 
+ +# firstYearXTicks = 1 + +# An optional number of years between tick marks on the x axis. Leave +# commented out to determine the distance between ticks automatically. + +# yearStrideXTicks = 1 + +# The number of parallel tasks occupied by each timeSeriesTransport task. +# Analysis may run faster for large meshes when this value is set to 2 or 3 +subprocessCount = 1 + +[timeSeriesArcticTransportTransects] +transectsToPlot = ['all'] # Number of months over which to compute moving average movingAveragePoints = 1 @@ -1715,6 +1811,7 @@ movingAveragePoints = 1 subprocessCount = 1 + [regionalTSDiagrams] ## options related to plotting T/S diagrams of ocean regions @@ -1779,11 +1876,54 @@ normType = log # Obserational data sets to compare against obs = ['SOSE', 'WOA18'] +[TSDiagramsForGreenlandRegions] +## options related to plotting T/S diagrams of Greenland regions + +# list of regions to plot or ['all'] for all regions in the masks file. +# See "regionNames" in the greenlandRegions masks file in +# regionMaskSubdirectory for details. 
+regionNames = [] + +# diagram type, either 'volumetric' or 'scatter', depending on if the points +# should be binned the plot should show the volume fraction in each bin or +# scattered points colored by their depth +diagramType = volumetric + +# if diagramType == 'volumetric', the bin boundaries for T and S +# if diagramType == 'scatter', only the min and max are important (and the +# bins are only used for computing neutral density contours) +Tbins = numpy.linspace(-2.5, 8.0, 211) +Sbins = numpy.linspace(33.5, 35.5, 401) + +# density contour interval +rhoInterval = 0.3 + +# The color map for depth or volume +colormap = cmo.deep +# The following is more appropriate if diagramType == 'scatter' +# colormap = cmo.deep_r +# the type of norm used in the colormap {'linear', 'log'} +normType = log + +# The minimum and maximum depth over which fields are plotted, default is +# to take these values from the geojson feature's zmin and zmax properties. +# Add these to a custom config file to override the defaults. +zmin = -6000 +zmax = 0 + +# the minimum and maximum volume for the colorbar, default is the minimum and +# maximum over the mode output +#volMin = 3e9 +#volMax = 1e12 + +# Obserational data sets to compare against +obs = ['WOA18'] + [TSDiagramsForArcticOceanRegions] -## options related to plotting T/S diagrams of Antarctic regions +## options related to plotting T/S diagrams of Arctic regions # list of regions to plot or ['all'] for all regions in the masks file. -# See "regionNames" in the antarcticRegions masks file in +# See "regionNames" in the arcticRegions masks file in # regionMaskSubdirectory for details. 
regionNames = [] @@ -1862,6 +2002,68 @@ zmax = 0 obs = ['WOA18'] +[climatologyMapVel] +## options related to plotting climatology maps of ocean velocity fields at +## various depths, including the sea surface and the sea floor, against +## control model results + +# comparison grid(s) +# on which to plot the velocity climatologies +comparisonGrids = ['latlon'] + +# Months or seasons to plot (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, +# Nov, Dec, JFM, AMJ, JAS, OND, ANN) +seasons = ['ANN'] + +# list of depths in meters (positive up) at which to analyze, 'top' for the +# sea surface, 'bot' for the sea floor +depths = ['top', -200, -400, -600, -800, -1000, -1500, -2000, 'bot'] + +# a list of fields to plot. All supported fields are listed +# below +fieldList = ['zonalVelocity', 'meridionalVelocity', 'velocityMagnitude'] + +# set the suffix for files, e.g. if you want to use a different comparison +# grid from the default +fileSuffix = latlon + +# depth separating shallow from deep color maps +shallowVsDeepColormapDepth = -200 + +# colormap for model/observations +colormapNameResult = delta +# whether the colormap is indexed or continuous +colormapTypeResult = continuous +# the type of norm used in the colormap +normTypeResult = linear +# A dictionary with keywords for the norm +normArgsResult = {'vmin': -0.2, 'vmax': 0.2} + + +[climatologyMapVel_VelocityMagnitudeShallow] + +# colormap for model/observations +colormapNameResult = ice +# whether the colormap is indexed or continuous +colormapTypeResult = continuous +# the type of norm used in the colormap +normTypeResult = log +# A dictionary with keywords for the norm +normArgsResult = {'vmin': 1.e-3, 'vmax': 1.} + + +[climatologyMapVel_VelocityMagnitudeDeep] + +# colormap for model/observations +colormapNameResult = ice +# whether the colormap is indexed or continuous +colormapTypeResult = continuous +# the type of norm used in the colormap +normTypeResult = log
+# A dictionary with keywords for the norm +normArgsResult = {'vmin': 1.e-4, 'vmax': 5.e-1} + + [climatologyMapSose] ## options related to plotting climatology maps of Antarctic fields at various ## levels, including the sea floor against control model results and SOSE @@ -3553,6 +3755,9 @@ seasons = ['JFM', 'JAS', 'ANN'] # minimum and maximum depth of profile plots, or empty for the full depth range depthRange = [] +# maximum depth of cells to be included in profile statistics +maxDepth = None + # a list of region names from the region masks file to plot regionNames = ["Atlantic_Basin", "Pacific_Basin", "Indian_Basin", "Arctic_Basin", "Southern_Ocean_Basin", "Mediterranean_Basin", diff --git a/mpas_analysis/ocean/__init__.py b/mpas_analysis/ocean/__init__.py index 6dc6f3002..0a3c88813 100644 --- a/mpas_analysis/ocean/__init__.py +++ b/mpas_analysis/ocean/__init__.py @@ -1,4 +1,5 @@ from mpas_analysis.ocean.climatology_map_sst import ClimatologyMapSST +from mpas_analysis.ocean.climatology_map_vel import ClimatologyMapVel from mpas_analysis.ocean.climatology_map_mld import ClimatologyMapMLD from mpas_analysis.ocean.climatology_map_mld_min_max import \ ClimatologyMapMLDMinMax @@ -7,6 +8,7 @@ from mpas_analysis.ocean.climatology_map_eke import ClimatologyMapEKE from mpas_analysis.ocean.climatology_map_bsf import \ ClimatologyMapBSF +from mpas_analysis.ocean.climatology_map_fluxes import ClimatologyMapFluxes from mpas_analysis.ocean.climatology_map_ohc_anomaly import \ ClimatologyMapOHCAnomaly from mpas_analysis.ocean.climatology_map_bgc import ClimatologyMapBGC diff --git a/mpas_analysis/ocean/climatology_map_fluxes.py b/mpas_analysis/ocean/climatology_map_fluxes.py new file mode 100644 index 000000000..c55de400e --- /dev/null +++ b/mpas_analysis/ocean/climatology_map_fluxes.py @@ -0,0 +1,213 @@ +# This software is open source software available under the BSD-3 license. +# +# Copyright (c) 2022 Triad National Security, LLC. All rights reserved. 
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights +# reserved. +# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved. +# +# Additional copyright and license information can be found in the LICENSE file +# distributed with this code, or at +# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE +import xarray as xr +import datetime + +from mpas_analysis.shared import AnalysisTask + +from mpas_analysis.shared.constants import constants + +from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask + +from mpas_analysis.shared.plot import PlotClimatologyMapSubtask + + +class ClimatologyMapFluxes(AnalysisTask): + """ + An analysis task for plotting sea surface fluxes. Observational comparison + is not supported because no observational datasets are currently available. + """ + # Authors + # ------- + # Carolyn Begeman + + def __init__(self, config, mpasClimatologyTask, controlConfig=None, + fluxType='mass'): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + + fluxType : str, optional + The type of surface fluxes, which corresponds to a different config + section. One of 'mass' or 'heat'. 
+ """ + # Authors + # ------- + # Carolyn Begeman + + taskName = f'climatologyMap{fluxType.title()}Fluxes' + groupLink = taskName + + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapFluxes, self).__init__( + config=config, taskName=taskName, + componentName='ocean', + tags=['climatology', 'horizontalMap', 'fluxes', fluxType]) + + iselValues = None + + # read in what seasons we want to plot + seasons = config.getexpression(taskName, 'seasons') + + if len(seasons) == 0: + raise ValueError(f'config section {taskName} does not contain ' + 'valid list of seasons') + + comparisonGridNames = config.getexpression(taskName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError(f'config section {taskName} does not contain ' + 'valid list of comparison grids') + + # the variable mpasFieldName will be added to mpasClimatologyTask + # along with the seasons. + variableList = config.getexpression(taskName, 'variables') + for variable in variableList: + + mpasFieldName = f'timeMonthly_avg_{variable}' + + # TemperatureFlux variables have different units and need to be + # converted to the units of heat flux variables to be compared + if 'TemperatureFlux' in variable: + remapClimatologySubtask = RemapMpasTemperatureFluxClimatology( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName=variable, + variableList=[mpasFieldName], + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues, + subtaskName=f'remap_{variable}') + mpasFieldName = 'timeMonthly_avg_' \ + f'{variable.replace("TemperatureFlux", "HeatFlux")}' + else: + remapClimatologySubtask = RemapMpasClimatologySubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName=variable, + variableList=[mpasFieldName], + comparisonGridNames=comparisonGridNames, + seasons=seasons, + iselValues=iselValues, + subtaskName=f'remap_{variable}') + + remapObservationsSubtask = None + galleryName = variable 
+            if controlConfig is None: + refTitleLabel = None + refFieldName = None + diffTitleLabel = None + else: + control_run_name = controlConfig.get('runs', 'mainRunName') + refTitleLabel = f'Control: {control_run_name}' + refFieldName = mpasFieldName + diffTitleLabel = 'Main - Control' + + for comparisonGridName in comparisonGridNames: + for season in seasons: + # make a new subtask for this season and comparison grid + subtaskName = f'plot_{variable}_{season}_{comparisonGridName}' + outFileName = f'{variable}_{season}_{comparisonGridName}' + subtask = PlotClimatologyMapSubtask( + self, season, comparisonGridName, remapClimatologySubtask, + remapObservationsSubtask, controlConfig=controlConfig, + subtaskName=subtaskName) + + if 'TemperatureFlux' in variable: + fieldNameInTitle = variable.replace('TemperatureFlux', 'HeatFlux') + else: + fieldNameInTitle = variable + + if 'HeatFlux' in mpasFieldName: + groupSubtitle = 'Heat fluxes' + unitsLabel = r'W m$^{-2}$' + else: + groupSubtitle = 'Mass fluxes' + unitsLabel = r'kg m$^{-2}$ s$^{-1}$' + + subtask.set_plot_info( + outFileLabel=outFileName, + fieldNameInTitle=fieldNameInTitle, + mpasFieldName=mpasFieldName, + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=unitsLabel, + imageCaption=variable, + galleryGroup='surface fluxes', + groupSubtitle=groupSubtitle, + groupLink=groupLink, + galleryName=galleryName) + + self.add_subtask(subtask) + +# adds to the functionality of RemapMpasClimatology +class RemapMpasTemperatureFluxClimatology(RemapMpasClimatologySubtask): + """ + A subtask for computing climatologies of heat flux from temperature flux + """ + # Authors + # ------- + # Carolyn Begeman + + def customize_masked_climatology(self, climatology, season): + """ + Convert temperature flux to heat flux as part of the climatology + + Parameters + ---------- + climatology : ``xarray.Dataset`` object + the climatology data set + + season : str + The name of the season to be masked + + 
Returns + ------- + climatology : ``xarray.Dataset`` object + the modified climatology data set + """ + # Authors + # ------- + # Carolyn Begeman + + # first, call the base class's version of this function so we extract + # the desired slices. + variable = self.variableList[0] + climatology = super(RemapMpasTemperatureFluxClimatology, + self).customize_masked_climatology(climatology, + season) + + # calculate heat flux from temperature flux (TODO confirm the 0.5 factor) + scaleFactor = constants.rho_sw * constants.cp_sw # C m s^-1 to W m^-2 + heatFlux = 0.5 * scaleFactor * climatology[variable] + # drop unnecessary fields before re-mapping (drop_vars returns a new dataset) + climatology = climatology.drop_vars([variable]) + + # this creates a variable with heat flux units in climatology (like netcdf) + variable = variable.replace('TemperatureFlux', 'HeatFlux') + climatology[variable] = heatFlux + climatology[variable].attrs['units'] = 'W m$^{-2}$' + climatology[variable].attrs['description'] = \ + f'{variable} converted to heat flux' + + return climatology diff --git a/mpas_analysis/ocean/climatology_map_vel.py b/mpas_analysis/ocean/climatology_map_vel.py new file mode 100644 index 000000000..79753267d --- /dev/null +++ b/mpas_analysis/ocean/climatology_map_vel.py @@ -0,0 +1,299 @@ +# This software is open source software available under the BSD-3 license. +# +# Copyright (c) 2022 Triad National Security, LLC. All rights reserved. +# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights +# reserved. +# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved. +# +# Additional copyright and license information can be found in the LICENSE file +# distributed with this code, or at +# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE +""" +Analysis tasks for plotting climatology maps of ocean velocity fields against +observations and reanalysis data. 
+""" +# Authors +# ------- +# Carolyn Begeman + +import numpy + +from mpas_analysis.shared import AnalysisTask + +from mpas_analysis.shared.climatology import RemapMpasClimatologySubtask + +from mpas_analysis.ocean.remap_depth_slices_subtask import \ + RemapDepthSlicesSubtask +from mpas_analysis.shared.plot import PlotClimatologyMapSubtask +from mpas_analysis.ocean.remap_sose_climatology import RemapSoseClimatology + +from mpas_analysis.shared.io.utility import build_obs_path + + +class ClimatologyMapVel(AnalysisTask): + """ + An analysis task for comparison of antarctic field against the Southern + Ocean State Estimate + """ + # Authors + # ------- + # Carolyn Begeman + + def __init__(self, config, mpasClimatologyTask, + controlConfig=None): + """ + Construct the analysis task. + + Parameters + ---------- + config : mpas_tools.config.MpasConfigParser + Configuration options + + mpasClimatologyTask : ``MpasClimatologyTask`` + The task that produced the climatology to be remapped and plotted + + controlconfig : mpas_tools.config.MpasConfigParser, optional + Configuration options for a control run (if any) + """ + # Authors + # ------- + # Carolyn Begeman + + fields = \ + [{'prefix': 'zonalVelocity', + 'mpas': 'timeMonthly_avg_velocityZonal', + 'units': r'm s$^{-1}$', + 'titleName': 'Zonal Velocity', + '3D': True, + 'obsFilePrefix': 'zonal_vel', + 'obsFieldName': 'zonalVel', + 'obsBotFieldName': 'botZonalVel'}, + {'prefix': 'meridionalVelocity', + 'mpas': 'timeMonthly_avg_velocityMeridional', + 'units': r'm s$^{-1}$', + 'titleName': 'Meridional Velocity', + '3D': True, + 'obsFilePrefix': 'merid_vel', + 'obsFieldName': 'meridVel', + 'obsBotFieldName': 'botMeridVel'}, + {'prefix': 'velocityMagnitude', + 'mpas': 'velMag', + 'units': r'm s$^{-1}$', + 'titleName': 'Velocity Magnitude', + '3D': True, + 'obsFilePrefix': 'vel_mag', + 'obsFieldName': 'velMag', + 'obsBotFieldName': 'botVelMag'}] + + tags = ['climatology', 'horizontalMap', 'climatologyMapVel'] + for field in 
fields: + tags.append(field['prefix']) + + # call the constructor from the base class (AnalysisTask) + super(ClimatologyMapVel, self).__init__( + config=config, taskName='climatologyMapVel', + componentName='ocean', + tags=tags) + + sectionName = self.taskName + + fileSuffix = config.get(sectionName, 'fileSuffix') + if fileSuffix.endswith('.nc'): + fileSuffix = fileSuffix[:-len('.nc')] + + fieldList = config.getexpression(sectionName, 'fieldList') + fields = [field for field in fields if field['prefix'] in fieldList] + + # read in what seasons we want to plot + seasons = config.getexpression(sectionName, 'seasons') + + if len(seasons) == 0: + raise ValueError(f'config section {sectionName} does not contain ' + 'valid list of seasons') + + comparisonGridNames = config.getexpression(sectionName, + 'comparisonGrids') + + if len(comparisonGridNames) == 0: + raise ValueError(f'config section {sectionName} does not contain ' + 'valid list of comparison grids') + + if not numpy.any([field['3D'] for field in fields]): + depths = None + else: + depths = config.getexpression(sectionName, 'depths') + + if len(depths) == 0: + raise ValueError(f'config section {sectionName} does not ' + 'contain valid list of depths') + + variableList = [] + for field in fields: + if field['mpas'] != 'velMag': + variableList.append(field['mpas']) + else: + for variable_dependency in ['timeMonthly_avg_velocityZonal', + 'timeMonthly_avg_velocityMeridional']: + if variable_dependency not in variableList: + variableList.append(variable_dependency) + + shallowVsDeepColormapDepth = config.getfloat( + sectionName, 'shallowVsDeepColormapDepth') + + shallow = [] + for depth in depths: + if depth == 'top': + shallow.append(True) + elif depth == 'bot': + shallow.append(False) + else: + shallow.append(depth >= shallowVsDeepColormapDepth) + + if depths is None: + remapMpasSubtask = RemapMpasClimatologySubtask( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName='vel',
variableList=variableList, + seasons=seasons, + comparisonGridNames=comparisonGridNames, + iselValues=None) + else: + remapMpasSubtask = RemapMpasVelMagClimatology( + mpasClimatologyTask=mpasClimatologyTask, + parentTask=self, + climatologyName='vel', + variableList=variableList, + seasons=seasons, + depths=depths, + comparisonGridNames=comparisonGridNames, + iselValues=None) + + for field in fields: + fieldPrefix = field['prefix'] + upperFieldPrefix = fieldPrefix[0].upper() + fieldPrefix[1:] + sectionName = f'{self.taskName}{upperFieldPrefix}' + + if field['3D']: + fieldDepths = depths + else: + fieldDepths = None + + remapObsSubtask = None + + refFieldName = None + outFileLabel = fieldPrefix + refTitleLabel = None + diffTitleLabel = None + galleryName = 'Velocity' + if controlConfig is not None: + refFieldName = field['mpas'] + controlRunName = controlConfig.get('runs', 'mainRunName') + galleryName = f'Control: {controlRunName}' + refTitleLabel = galleryName + diffTitleLabel = 'Main - Control' + + if field['3D']: + fieldDepths = depths + else: + fieldDepths = [None] + + for comparisonGridName in comparisonGridNames: + for depthIndex, depth in enumerate(fieldDepths): + for season in seasons: + + subtaskName = f'plot{upperFieldPrefix}_{season}_' \ + f'{comparisonGridName}' + if depth is not None: + subtaskName = f'{subtaskName}_depth_{depth}' + + subtask = PlotClimatologyMapSubtask( + parentTask=self, + season=season, + comparisonGridName=comparisonGridName, + remapMpasClimatologySubtask=remapMpasSubtask, + remapObsClimatologySubtask=remapObsSubtask, + controlConfig=controlConfig, + depth=depth, + subtaskName=subtaskName) + + configSectionName = 'climatologyMapVel_' \ + f'{upperFieldPrefix}' + + # if available, use a separate color map for shallow + # and deep + if depth is not None: + if shallow[depthIndex]: + suffix = 'Shallow' + else: + suffix = 'Deep' + testSectionName = f'{configSectionName}{suffix}' + + if config.has_section(testSectionName): + 
configSectionName = testSectionName + else: + configSectionName = 'climatologyMapVel' + + subtask.set_plot_info( + outFileLabel=outFileLabel, + fieldNameInTitle=field['titleName'], + mpasFieldName=field['mpas'], + refFieldName=refFieldName, + refTitleLabel=refTitleLabel, + diffTitleLabel=diffTitleLabel, + unitsLabel=field['units'], + imageCaption=field['titleName'], + galleryGroup=galleryName, + groupSubtitle=None, + groupLink=f'{fieldPrefix}Vel', + galleryName=field['titleName'], + configSectionName=configSectionName) + + self.add_subtask(subtask) + + +class RemapMpasVelMagClimatology(RemapDepthSlicesSubtask): + """ + A subtask for computing climatologies of velocity magnitude from zonal + and meridional components + """ + # Authors + # ------- + # Carolyn Begeman + + def customize_masked_climatology(self, climatology, season): + """ + Construct velocity magnitude as part of the climatology + + Parameters + ---------- + climatology : ``xarray.Dataset`` object + the climatology data set + + season : str + The name of the season to be masked + + Returns + ------- + climatology : ``xarray.Dataset`` object + the modified climatology data set + """ + # Authors + # ------- + # Carolyn Begeman + + # first, call the base class's version of this function so we extract + # the desired slices. 
+ climatology = super(RemapMpasVelMagClimatology, + self).customize_masked_climatology(climatology, + season) + + if 'timeMonthly_avg_velocityZonal' in climatology and \ + 'timeMonthly_avg_velocityMeridional' in climatology: + zonalVel = climatology.timeMonthly_avg_velocityZonal + meridVel = climatology.timeMonthly_avg_velocityMeridional + climatology['velMag'] = numpy.sqrt(zonalVel**2 + meridVel**2) + climatology.velMag.attrs['units'] = 'm s$^{-1}$' + climatology.velMag.attrs['description'] = 'velocity magnitude' + + return climatology diff --git a/mpas_analysis/ocean/conservation.py b/mpas_analysis/ocean/conservation.py index 194373d5b..9d224d6d8 100644 --- a/mpas_analysis/ocean/conservation.py +++ b/mpas_analysis/ocean/conservation.py @@ -172,34 +172,54 @@ def setup_and_check(self): self.plotTypes = self.config.getexpression('timeSeriesConservation', 'plotTypes') - self.masterVariableList = {'absolute_energy_error': ['absoluteEnergyError'], - 'total_energy_flux': ['netEnergyFlux'], - 'absolute_salt_error': ['absoluteSaltError'], - 'ice_salt_flux': ['netSaltFlux'], - 'total_mass_flux': ['netMassFlux'], - 'total_mass_change': ['netMassChange'], - 'land_ice_mass_change': ['landIceMassChange'], - 'land_ice_ssh_change': ['landIceSshChange'], - 'land_ice_mass_flux': ['landIceMassFlux'], - 'land_ice_mass_flux_components': ['accumulatedIcebergFlux', - 'accumulatedLandIceFlux', - 'accumulatedRemovedRiverRunoffFlux', - 'accumulatedRemovedIceRunoffFlux']} + self.masterVariableList = { + 'absolute_energy_error': ['absoluteEnergyError'], + 'total_energy': ['finalEnergy'], + 'total_energy_flux': ['netEnergyFlux'], + 'total_energy_change': ['energyAnomaly'], + 'total_salt_change': ['saltAnomaly'], + 'total_salt': ['finalSalt'], + 'absolute_salt_error': ['absoluteSaltError'], + 'ice_salt_flux': ['netSaltFlux'], + 'sea_ice_salt_flux': ['accumulatedSeaIceSalinityFlux'], + 'total_mass_flux': ['netMassFlux'], + 'total_mass': ['finalMass'], + 'total_mass_change': ['massAnomaly'], + 
'rain_mass_flux': ['accumulatedRainFlux'], + 'snow_mass_flux': ['accumulatedSnowFlux'], + 'evaporation_mass_flux': ['accumulatedEvaporationFlux'], + 'sea_ice_mass_flux': ['accumulatedSeaIceFlux'], + 'river_runoff_mass_flux': ['accumulatedRiverRunoffFlux'], + 'ice_runoff_mass_flux': ['accumulatedIceRunoffFlux'], + 'iceberg_mass_flux': ['accumulatedIcebergFlux'], + 'frazil_mass_flux': ['accumulatedFrazilFlux'], + 'ice_shelf_frazil_mass_flux': ['accumulatedLandIceFrazilFlux'], + 'ice_shelf_melt_mass_flux': ['accumulatedLandIceFlux'], + 'land_ice_mass_change': ['landIceMassChange'], + 'land_ice_ssh_change': ['landIceSshChange'], + 'land_ice_mass_flux': ['landIceMassFlux'], + 'land_ice_mass_flux_components': ['accumulatedIcebergFlux', + 'accumulatedLandIceFlux', + 'accumulatedRemovedRiverRunoffFlux', + 'accumulatedRemovedIceRunoffFlux']} # for each derived variable, which source variables are needed - self.derivedVariableList = {'netMassChange': ['massChange'], - 'landIceMassFlux': ['accumulatedIcebergFlux', - 'accumulatedLandIceFlux', - 'accumulatedRemovedRiverRunoffFlux', - 'accumulatedRemovedIceRunoffFlux'], - 'landIceSshChange': ['accumulatedIcebergFlux', - 'accumulatedLandIceFlux', - 'accumulatedRemovedRiverRunoffFlux', - 'accumulatedRemovedIceRunoffFlux'], - 'landIceMassChange': ['accumulatedIcebergFlux', - 'accumulatedLandIceFlux', - 'accumulatedRemovedRiverRunoffFlux', - 'accumulatedRemovedIceRunoffFlux']} + self.derivedVariableList = { + 'massAnomaly': ['massChange', 'netMassFlux'], + 'energyAnomaly': ['energyChange', 'netEnergyFlux'], + 'saltAnomaly': ['saltChange', 'netSaltFlux'], + 'landIceMassFlux': ['accumulatedIcebergFlux', + 'accumulatedLandIceFlux', + 'accumulatedRemovedRiverRunoffFlux', + 'accumulatedRemovedIceRunoffFlux'], + 'landIceSshChange': ['accumulatedIcebergFlux', + 'accumulatedLandIceFlux', + 'accumulatedRemovedRiverRunoffFlux', + 'accumulatedRemovedIceRunoffFlux'], + 'landIceMassChange': ['accumulatedIcebergFlux', + 'accumulatedLandIceFlux', 
+ 'accumulatedRemovedRiverRunoffFlux', + 'accumulatedRemovedIceRunoffFlux']} # Determine the xml files for each plot and the variables each plot will use self.xmlFileNames = [] @@ -293,11 +313,27 @@ def _make_plot(self, plot_type): titles = {} titles['total_energy_flux'] = 'Total energy flux' + titles['total_energy'] = 'Total energy' titles['absolute_energy_error'] = 'Energy error' + titles['total_energy_change'] = 'Cumulative energy change' + titles['total_salt_change'] = 'Cumulative salt change' + titles['total_salt'] = 'Total salt' titles['ice_salt_flux'] = 'Salt flux related to land ice and sea ice' + titles['sea_ice_salt_flux'] = 'Salt flux related to sea ice, frazil excluded' titles['absolute_salt_error'] = 'Salt conservation error' titles['total_mass_flux'] = 'Total mass flux' - titles['total_mass_change'] = 'Total mass anomaly' + titles['total_mass'] = 'Total mass' + titles['total_mass_change'] = 'Cumulative mass change' + titles['rain_mass_flux'] = 'Mass flux due to rain' + titles['snow_mass_flux'] = 'Mass flux due to snow' + titles['evaporation_mass_flux'] = 'Mass flux due to evaporation' + titles['sea_ice_mass_flux'] = 'Mass flux due to sea ice' + titles['river_runoff_mass_flux'] = 'Mass flux due to river runoff' + titles['ice_runoff_mass_flux'] = 'Mass flux due to ice runoff' + titles['iceberg_mass_flux'] = 'Mass flux due to icebergs' + titles['frazil_mass_flux'] = 'Mass flux due to frazil' + titles['ice_shelf_frazil_mass_flux'] = 'Mass flux due to ice shelf frazil' + titles['ice_shelf_melt_mass_flux'] = 'Mass flux due to ice shelf melt' titles['land_ice_mass_flux'] = 'Mass flux due to land ice' titles['land_ice_mass_change'] = 'Mass anomaly due to land ice fluxes' titles['land_ice_ssh_change'] = 'SSH anomaly due to land ice fluxes' @@ -305,10 +341,26 @@ def _make_plot(self, plot_type): y_labels = {} y_labels['total_energy_flux'] = 'Energy flux (W)' + y_labels['total_energy'] = 'Energy (J)' y_labels['absolute_energy_error'] = 'Energy (J)' + 
y_labels['total_energy_change'] = 'Energy (J)' + y_labels['total_salt_change'] = 'Salt (Gt)' + y_labels['total_salt'] = 'Salt (Gt)' y_labels['ice_salt_flux'] = 'Salt flux (Gt/yr)' + y_labels['sea_ice_salt_flux'] = 'Salt flux (Gt/yr)' y_labels['absolute_salt_error'] = 'Salt (Gt)' y_labels['total_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['total_mass'] = 'Mass (Gt)' + y_labels['rain_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['snow_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['evaporation_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['sea_ice_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['river_runoff_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['ice_runoff_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['iceberg_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['frazil_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['ice_shelf_frazil_mass_flux'] = 'Mass flux (Gt/yr)' + y_labels['ice_shelf_melt_mass_flux'] = 'Mass flux (Gt/yr)' y_labels['total_mass_change'] = 'Mass (Gt)' y_labels['land_ice_mass_flux'] = 'Mass flux (Gt/yr)' y_labels['land_ice_mass_change'] = 'Mass (Gt)' @@ -317,15 +369,33 @@ def _make_plot(self, plot_type): captions = {} captions['total_energy_flux'] = 'Total energy flux' + captions['total_energy'] = 'Total energy' captions['absolute_energy_error'] = 'Absolute energy conservation error' + captions['total_energy_change'] = 'Cumulative energy change' + captions['total_salt_change'] = 'Cumulative salt change' + captions['total_salt'] = 'Total salt' captions['ice_salt_flux'] = 'Salt flux related to land ice and sea ice ' \ '(sea ice salinity flux, sea ice frazil flux, and land ice frazil flux)' + captions['sea_ice_salt_flux'] = 'Salt flux related to sea ice excluding frazil' \ + '(sea ice salinity flux)' captions['absolute_salt_error'] = 'Absolute salt conservation error' captions['total_mass_flux'] = 'Total mass flux' - captions['total_mass_change'] = 'Total mass anomaly' + captions['total_mass'] = 'Total mass' + captions['rain_mass_flux'] = 'Mass flux due to 
rain' + captions['snow_mass_flux'] = 'Mass flux due to snow' + captions['evaporation_mass_flux'] = 'Mass flux due to evaporation' + captions['sea_ice_mass_flux'] = 'Mass flux due to sea ice' + captions['river_runoff_mass_flux'] = 'Mass flux due to river runoff' + captions['ice_runoff_mass_flux'] = 'Mass flux due to ice runoff' + captions['iceberg_mass_flux'] = 'Mass flux due to icebergs' + captions['frazil_mass_flux'] = 'Mass flux due to frazil' + captions['ice_shelf_frazil_mass_flux'] = 'Mass flux due to ice shelf frazil' + captions['ice_shelf_melt_mass_flux'] = 'Mass flux due to ice shelf melt' + captions['total_mass_change'] = 'Cumulative mass change' captions['land_ice_mass_flux'] = 'Mass flux due to land ice' captions['land_ice_mass_change'] = 'Mass anomaly due to land ice fluxes' - captions['land_ice_ssh_change'] = 'SSH anomaly due to land ice fluxes. Assumes a constant ocean area.' + captions['land_ice_ssh_change'] = 'SSH anomaly due to land ice fluxes. ' \ + 'Assumes a constant ocean area.' 
captions['land_ice_mass_flux_components'] = 'Mass flux components from land ice' self.logger.info(f' Open conservation file {self.outputFile}...') @@ -391,15 +461,18 @@ def _make_plot(self, plot_type): fields.append(variable) legend_text = self.controlConfig.get('runs', 'mainRunName') if len(self.masterVariableList[plot_type]) > 1: - legend_text = f"{legend_text}, {varname.replace('accumulated', '').replace('Flux', '')}" + legend_text = f"{legend_text}, " \ + f"{varname.replace('accumulated', '').replace('Flux', '')}" legendText.append(legend_text) lineColors.append(config.get('timeSeries', 'controlColor')) lineStyles.append(lineStylesBase[index]) lineWidths = [3 for i in fields] - if config.has_option('timeSeries', 'movingAveragePoints'): - movingAveragePoints = config.getint('timeSeries', - 'movingAveragePoints') + if config.has_option('timeSeriesConservation', 'movingAveragePoints'): + # We assume here that movingAveragePoints is given in months + # and conservation output has daily frequency + movingAveragePoints = \ + config.getint('timeSeriesConservation', 'movingAveragePoints') else: movingAveragePoints = None @@ -446,13 +519,15 @@ def _get_variable(self, ds, varname, mks=False): variable = ds[varname] else: # Here we keep the units mks - if varname == 'netMassChange': - variable = self._get_variable(ds, 'massChange', mks=True) - # mass_flux = self._get_variable(ds, 'netMassFlux') - # # Assume that the frequency of output is monthly - # dt = constants.sec_per_month - # # Convert from kg/s to kg - # derived_variable = mass_flux.cumsum(axis=0) * dt + if varname == 'massAnomaly': + source_variable = self._get_variable(ds, 'massChange', mks=False) + variable = source_variable.cumsum(axis=0) + elif varname == 'saltAnomaly': + source_variable = self._get_variable(ds, 'saltChange', mks=False) + variable = source_variable.cumsum(axis=0) + elif varname == 'energyAnomaly': + source_variable = self._get_variable(ds, 'energyChange', mks=True) + variable = 
source_variable.cumsum(axis=0) elif varname == 'landIceMassChange': land_ice_mass_flux = self._get_variable(ds, 'landIceMassFlux', mks=True) # Assume that the frequency of output is monthly @@ -503,9 +578,9 @@ def _get_variable(self, ds, varname, mks=False): mass_vars = ['initialMass', 'finalMass', 'absoluteMassError', 'relativeMassError', 'massChange', 'landIceMassChange'] salt_vars = ['initialSalt', 'finalSalt', 'absoluteSaltError', - 'relativeSaltError'] + 'relativeSaltError', 'saltChange'] mass_flux_vars = ['netMassFlux', 'landIceMassFlux'] - salt_flux_vars = ['netSaltFlux'] + salt_flux_vars = ['netSaltFlux', 'accumulatedSeaIceSalinityFlux'] ssh_vars = ['landIceSshChange', 'sshChange'] if (varname in mass_vars) or (varname in salt_vars): # Convert from kg to Gt @@ -598,9 +673,10 @@ def _compute_time_series_with_ncrcat(self, variable_list): if append: args.append('--record_append') - printCommand = '{} {} ... {} {}'.format(' '.join(args), inputFiles[0], - inputFiles[-1], - self.outputFile) + allArgs = ' '.join(args) + printCommand = f'{allArgs} {inputFiles[0]} ... 
{inputFiles[-1]} ' \ + f'{self.outputFile}' + args.extend(inputFiles) args.append(self.outputFile) diff --git a/mpas_analysis/ocean/ocean_regional_profiles.py b/mpas_analysis/ocean/ocean_regional_profiles.py index 971a15c57..470ae5cd2 100644 --- a/mpas_analysis/ocean/ocean_regional_profiles.py +++ b/mpas_analysis/ocean/ocean_regional_profiles.py @@ -17,16 +17,19 @@ import matplotlib.pyplot as plt from geometric_features import FeatureCollection, read_feature_collection +from geometric_features.aggregation import get_aggregator_by_name from mpas_analysis.shared import AnalysisTask from mpas_analysis.shared.io.utility import build_config_full_path, \ get_files_year_month, make_directories, decode_strings from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf_with_fill +from mpas_analysis.shared.io.utility import get_region_mask from mpas_analysis.shared.timekeeping.utility import days_to_datetime from mpas_analysis.shared.climatology import compute_climatology from mpas_analysis.shared.constants import constants from mpas_analysis.shared.html import write_image_xml from mpas_analysis.shared.plot import savefig, add_inset +from mpas_analysis.shared.regions.compute_region_masks_subtask import get_feature_list class OceanRegionalProfiles(AnalysisTask): @@ -83,15 +86,27 @@ def __init__(self, config, regionMasksTask, controlConfig=None): fields = config.getexpression(regionGroupSection, 'fields') + max_bottom_depth = config.getexpression(regionGroupSection, 'maxDepth') seasons = config.getexpression(regionGroupSection, 'seasons') regionNames = config.getexpression(regionGroupSection, 'regionNames') + if len(regionNames) == 0: return + if 'all' in regionNames: + aggregationFunction, prefix, date = get_aggregator_by_name( + regionGroup) + date = date + outFileSuffix = '{}{}'.format(prefix, date) + geojsonFileName = \ + get_region_mask(self.config, + '{}.geojson'.format(outFileSuffix)) + regionNames = get_feature_list(geojsonFileName) 
self.add_region_group(regionMasksTask, regionGroup, regionNames, - fields, startYear, endYear, seasons) + fields, startYear, endYear, max_bottom_depth, + seasons) combineSubtask = \ self.combineSubtasks[regionGroup][(startYear, endYear)] @@ -110,7 +125,8 @@ def __init__(self, config, regionMasksTask, controlConfig=None): self.add_subtask(plotSubtask) def add_region_group(self, regionMasksTask, regionGroup, regionNames, - fields, startYear, endYear, seasons=None): + fields, startYear, endYear, max_bottom_depth=None, + seasons=None): """ Add years to the profiles to compute @@ -162,7 +178,8 @@ def add_region_group(self, regionMasksTask, regionGroup, regionNames, else: computeSubtask = ComputeRegionalProfileTimeSeriesSubtask( self, masksSubtask, regionGroup, regionNames, fields, - startYear=year, endYear=year) + startYear=year, endYear=year, + max_bottom_depth=max_bottom_depth) computeSubtask.run_after(masksSubtask) combineSubtask.run_after(computeSubtask) self.computeSubtasks[regionGroup][key] = computeSubtask @@ -180,13 +197,16 @@ class ComputeRegionalProfileTimeSeriesSubtask(AnalysisTask): startYear, endYear : int The beginning and end of the time series to compute + + max_bottom_depth : float + The maximum bottom depth of cells to include in the profile statistics """ # Authors # ------- # Xylar Asay-Davis def __init__(self, parentTask, masksSubtask, regionGroup, regionNames, - fields, startYear, endYear): + fields, startYear, endYear, max_bottom_depth): """ Construct the analysis task. 
@@ -210,6 +230,10 @@ def __init__(self, parentTask, masksSubtask, regionGroup, regionNames, startYear, endYear : int The beginning and end of the time series to compute + + max_bottom_depth : float + The maximum bottom depth of cells to include in the profile + statistics """ # Authors # ------- @@ -227,10 +251,13 @@ def __init__(self, parentTask, masksSubtask, regionGroup, regionNames, parentTask.add_subtask(self) self.masksSubtask = masksSubtask + if 'all' in regionNames: + regionNames = get_feature_list(self.masksSubtask.geojsonFileName) self.regionNames = regionNames self.fields = fields self.startYear = startYear self.endYear = endYear + self.max_bottom_depth = max_bottom_depth def setup_and_check(self): """ @@ -331,6 +358,11 @@ def run_task(self): 'data': np.arange(nVertLevels)}) vertMask = vertIndex < dsRestart.maxLevelCell + if self.max_bottom_depth is not None: + depthMask = dsRestart.bottomDepth < self.max_bottom_depth + vertDepthMask = np.logical_and(vertMask, depthMask) + else: + vertDepthMask = vertMask # get region masks regionMaskFileName = self.masksSubtask.maskFileName @@ -351,7 +383,7 @@ def run_task(self): cellMasks = dsRegionMask.regionCellMasks regionNamesVar = dsRegionMask.regionNames - totalArea = self._masked_area_sum(cellMasks, areaCell, vertMask) + totalArea = self._masked_area_sum(cellMasks, areaCell, vertDepthMask) datasets = [] for timeIndex, fileName in enumerate(inputFiles): @@ -376,7 +408,7 @@ def run_task(self): prefix = field['prefix'] self.logger.info(' {}'.format(field['titleName'])) - var = dsLocal[variableName].where(vertMask) + var = dsLocal[variableName].where(vertDepthMask) meanName = '{}_mean'.format(prefix) dsLocal[meanName] = \ diff --git a/mpas_analysis/ocean/plot_hovmoller_subtask.py b/mpas_analysis/ocean/plot_hovmoller_subtask.py index b0d830e5c..419ce2910 100644 --- a/mpas_analysis/ocean/plot_hovmoller_subtask.py +++ b/mpas_analysis/ocean/plot_hovmoller_subtask.py @@ -379,22 +379,39 @@ def run_task(self): else: 
defaultFontSize = None - fig, _, suptitle = plot_vertical_section_comparison( - config, field, refField, diff, self.sectionName, xCoords=Time, - zCoord=z, colorbarLabel=self.unitsLabel, title=title, - modelTitle=mainRunName, refTitle=refTitle, diffTitle=diffTitle, - xlabels=xLabel, ylabel=yLabel, lineWidth=1, xCoordIsTime=True, - movingAveragePoints=movingAveragePoints, calendar=self.calendar, - firstYearXTicks=firstYearXTicks, yearStrideXTicks=yearStrideXTicks, - yLim=yLim, invertYAxis=False, titleFontSize=titleFontSize, - axisFontSize=axisFontSize, defaultFontSize=defaultFontSize) + is_empty = False + for size in field.sizes.values(): + if size == 0: + is_empty = True + break + + if is_empty: + # the plot will be empty + self.logger.warn('No cells in this region so the plot will be ' + 'empty') + fig = plt.figure() + suptitle = None + else: + + fig, _, suptitle = plot_vertical_section_comparison( + config, field, refField, diff, self.sectionName, xCoords=Time, + zCoord=z, colorbarLabel=self.unitsLabel, title=title, + modelTitle=mainRunName, refTitle=refTitle, diffTitle=diffTitle, + xlabels=xLabel, ylabel=yLabel, lineWidth=1, xCoordIsTime=True, + movingAveragePoints=movingAveragePoints, + calendar=self.calendar, + firstYearXTicks=firstYearXTicks, + yearStrideXTicks=yearStrideXTicks, + yLim=yLim, invertYAxis=False, titleFontSize=titleFontSize, + axisFontSize=axisFontSize, defaultFontSize=defaultFontSize) if self.regionMaskFile is not None: # shift the super-title a little to the left to make room for the # inset - pos = suptitle.get_position() - suptitle.set_position((pos[0] - 0.05, pos[1])) + if suptitle is not None: + pos = suptitle.get_position() + suptitle.set_position((pos[0] - 0.05, pos[1])) fcAll = read_feature_collection(self.regionMaskFile) diff --git a/mpas_analysis/ocean/plot_transect_subtask.py b/mpas_analysis/ocean/plot_transect_subtask.py index 9dda317c9..dc3168f41 100644 --- a/mpas_analysis/ocean/plot_transect_subtask.py +++ 
b/mpas_analysis/ocean/plot_transect_subtask.py @@ -19,7 +19,7 @@ import xarray as xr import numpy from matplotlib import colors -from matplotlib import cm +import matplotlib.pyplot as plt from geometric_features import FeatureCollection @@ -860,7 +860,7 @@ def _get_ds_triangulation(self, dsTransectTriangles): def _get_contour_colormap(): # https://stackoverflow.com/a/18926541/7728169 - cmap = cm.get_cmap('hot') + cmap = plt.get_cmap('hot') cmap = colors.LinearSegmentedColormap.from_list( f'trunc_{cmap.name}', cmap(numpy.linspace(0.1, 0.85, 100))) diff --git a/mpas_analysis/ocean/time_series_transport.py b/mpas_analysis/ocean/time_series_transport.py index a6c52008a..0d00a6a64 100644 --- a/mpas_analysis/ocean/time_series_transport.py +++ b/mpas_analysis/ocean/time_series_transport.py @@ -60,7 +60,7 @@ def __init__(self, config, controlConfig=None): # Xylar Asay-Davis # first, call the constructor from the base class (AnalysisTask) - super(TimeSeriesTransport, self).__init__( + super().__init__( config=config, taskName='timeSeriesTransport', componentName='ocean', @@ -69,41 +69,45 @@ def __init__(self, config, controlConfig=None): startYear = config.getint('timeSeries', 'startYear') endYear = config.getint('timeSeries', 'endYear') - years = [year for year in range(startYear, endYear + 1)] - - transectsToPlot = config.getexpression('timeSeriesTransport', - 'transectsToPlot') - if len(transectsToPlot) == 0: - return - - masksSubtask = ComputeTransectMasksSubtask( - parentTask=self, transectGroup='Transport Transects') - - transectsToPlot = masksSubtask.expand_transect_names(transectsToPlot) - transportTransectFileName = masksSubtask.geojsonFileName + transportGroups = config.getexpression('timeSeriesTransport', 'transportGroups') - self.add_subtask(masksSubtask) - - # in the end, we'll combine all the time series into one, but we - # create this task first so it's easier to tell it to run after all - # the compute tasks - combineSubtask = CombineTransportSubtask( - 
self, startYears=years, endYears=years) - - # run one subtask per year - for year in years: - computeSubtask = ComputeTransportSubtask( - self, startYear=year, endYear=year, masksSubtask=masksSubtask, - transectsToPlot=transectsToPlot) - self.add_subtask(computeSubtask) - computeSubtask.run_after(masksSubtask) - combineSubtask.run_after(computeSubtask) - - for index, transect in enumerate(transectsToPlot): - plotTransportSubtask = PlotTransportSubtask( - self, transect, index, controlConfig, transportTransectFileName) - plotTransportSubtask.run_after(combineSubtask) - self.add_subtask(plotTransportSubtask) + years = [year for year in range(startYear, endYear + 1)] + for transportGroup in transportGroups: + groupSuffix = transportGroup.replace(' ', '') + transectsToPlot = config.getexpression(f'timeSeries{groupSuffix}', + 'transectsToPlot') + if len(transectsToPlot) == 0: + return + + masksSubtask = ComputeTransectMasksSubtask( + parentTask=self, transectGroup=transportGroup) + + transectsToPlot = masksSubtask.expand_transect_names(transectsToPlot) + transportTransectFileName = masksSubtask.geojsonFileName + + self.add_subtask(masksSubtask) + + # in the end, we'll combine all the time series into one, but we + # create this task first so it's easier to tell it to run after all + # the compute tasks + combineSubtask = CombineTransportSubtask( + self, startYears=years, endYears=years, groupSuffix=groupSuffix) + + # run one subtask per year + for year in years: + computeSubtask = ComputeTransportSubtask( + self, startYear=year, endYear=year, masksSubtask=masksSubtask, + transectsToPlot=transectsToPlot, groupSuffix=groupSuffix) + self.add_subtask(computeSubtask) + computeSubtask.run_after(masksSubtask) + combineSubtask.run_after(computeSubtask) + + for index, transect in enumerate(transectsToPlot): + plotTransportSubtask = PlotTransportSubtask( + self, transect, index, controlConfig, transportTransectFileName, + transportGroup) + 
plotTransportSubtask.run_after(combineSubtask) + self.add_subtask(plotTransportSubtask) class ComputeTransportSubtask(AnalysisTask): @@ -120,6 +124,9 @@ class ComputeTransportSubtask(AnalysisTask): transectsToPlot : list of str A list of transects to plot + + groupSuffix : str + standard transects vs Arctic transects """ # Authors @@ -127,7 +134,7 @@ class ComputeTransportSubtask(AnalysisTask): # Xylar Asay-Davis, Stephen Price def __init__(self, parentTask, startYear, endYear, - masksSubtask, transectsToPlot): + masksSubtask, transectsToPlot, groupSuffix): """ Construct the analysis task. @@ -145,21 +152,23 @@ def __init__(self, parentTask, startYear, endYear, transectsToPlot : list of str A list of transects to plot + + groupSuffix : str + standard transects vs Arctic transects """ # Authors # ------- # Xylar Asay-Davis - + subtaskName = f'compute{groupSuffix}_{startYear:04d}-{endYear:04d}' # first, call the constructor from the base class (AnalysisTask) - super(ComputeTransportSubtask, self).__init__( + super().__init__( config=parentTask.config, taskName=parentTask.taskName, componentName=parentTask.componentName, tags=parentTask.tags, - subtaskName='computeTransport_{:04d}-{:04d}'.format(startYear, - endYear)) - - self.subprocessCount = self.config.getint('timeSeriesTransport', + subtaskName=subtaskName) + + self.subprocessCount = self.config.getint(f'timeSeries{groupSuffix}', 'subprocessCount') self.startYear = startYear self.endYear = endYear @@ -169,6 +178,7 @@ def __init__(self, parentTask, startYear, endYear, self.transectsToPlot = transectsToPlot self.restartFileName = None + self.groupSuffix = groupSuffix def setup_and_check(self): """ @@ -227,8 +237,7 @@ def run_task(self): except OSError: pass - outFileName = '{}/transport_{:04d}-{:04d}.nc'.format( - outputDirectory, self.startYear, self.endYear) + outFileName = f'{outputDirectory}/{self.groupSuffix}_{self.startYear:04d}-{self.endYear:04d}.nc' inputFiles = sorted(self.historyStreams.readpath( 
'timeSeriesStatsMonthlyOutput', startDate=startDate, @@ -389,7 +398,7 @@ class CombineTransportSubtask(AnalysisTask): # ------- # Xylar Asay-Davis - def __init__(self, parentTask, startYears, endYears): + def __init__(self, parentTask, startYears, endYears, groupSuffix): """ Construct the analysis task. @@ -401,21 +410,25 @@ def __init__(self, parentTask, startYears, endYears): startYears, endYears : list of int The beginning and end of each time series to combine + groupSuffix : str + standard transects vs Arctic transects + """ # Authors # ------- # Xylar Asay-Davis - + # first, call the constructor from the base class (AnalysisTask) super(CombineTransportSubtask, self).__init__( config=parentTask.config, taskName=parentTask.taskName, componentName=parentTask.componentName, tags=parentTask.tags, - subtaskName='combineTimeSeries') - + subtaskName=f'combine{groupSuffix}TimeSeries') + #print(self.taskName, self.subtaskName) self.startYears = startYears self.endYears = endYears + self.groupSuffix = groupSuffix def run_task(self): """ @@ -424,19 +437,17 @@ def run_task(self): # Authors # ------- # Xylar Asay-Davis - + groupSuffix = self.groupSuffix outputDirectory = '{}/transport/'.format( build_config_full_path(self.config, 'output', 'timeseriesSubdirectory')) + outFileName = f'{outputDirectory}/{groupSuffix}_{self.startYears[0]:04d}-{self.endYears[-1]:04d}.nc' - outFileName = '{}/transport_{:04d}-{:04d}.nc'.format( - outputDirectory, self.startYears[0], self.endYears[-1]) if not os.path.exists(outFileName): inFileNames = [] for startYear, endYear in zip(self.startYears, self.endYears): - inFileName = '{}/transport_{:04d}-{:04d}.nc'.format( - outputDirectory, startYear, endYear) + inFileName = f'{outputDirectory}/{groupSuffix}_{startYear:04d}-{endYear:04d}.nc' inFileNames.append(inFileName) ds = xarray.open_mfdataset(inFileNames, combine='nested', @@ -460,6 +471,10 @@ class PlotTransportSubtask(AnalysisTask): controlConfig : mpas_tools.config.MpasConfigParser The 
configuration options for the control run (if any) + transportGroup : str (with spaces) + standard transects (``Transport Transects``) + vs Arctic transects (``Arctic Transport Transects``) + """ # Authors @@ -467,7 +482,7 @@ class PlotTransportSubtask(AnalysisTask): # Xylar Asay-Davis, Stephen Price def __init__(self, parentTask, transect, transectIndex, controlConfig, - transportTransectFileName): + transportTransectFileName, transportGroup): """ Construct the analysis task. @@ -486,23 +501,29 @@ def __init__(self, parentTask, transect, transectIndex, controlConfig, controlconfig : mpas_tools.config.MpasConfigParser, optional Configuration options for a control run (if any) + + transportGroup : str (with spaces) + standard transects (``Transport Transects``) + vs Arctic transects (``Arctic Transport Transects``) """ # Authors # ------- # Xylar Asay-Davis # first, call the constructor from the base class (AnalysisTask) + transectKey = transect.replace(' ', '_') super(PlotTransportSubtask, self).__init__( config=parentTask.config, taskName=parentTask.taskName, componentName=parentTask.componentName, tags=parentTask.tags, - subtaskName='plotTransport_{}'.format(transect.replace(' ', '_'))) + subtaskName=f'plotTransport_{transectKey}') self.transportTransectFileName = transportTransectFileName self.transect = transect self.transectIndex = transectIndex self.controlConfig = controlConfig + self.transportGroup = transportGroup def setup_and_check(self): """ @@ -556,6 +577,8 @@ def run_task(self): 'Davis Strait': [-1.6, -3.6], 'Barents Sea Opening': [1.4, 2.6], 'Nares Strait': [-1.8, 0.2], + 'OSNAP section East': [15.6 - 0.8, 15.6 + 0.8], + 'OSNAP section West': [2.1 - 0.3, 2.1 + 0.3], 'Denmark Strait': None, 'Iceland-Faroe-Scotland': None} @@ -580,7 +603,8 @@ def run_task(self): plotControl = self.controlConfig is not None mainRunName = config.get('runs', 'mainRunName') - movingAveragePoints = config.getint('timeSeriesTransport', + groupSuffix = 
self.transportGroup.replace(' ','') + movingAveragePoints = config.getint(f'timeSeries{groupSuffix}', 'movingAveragePoints') self.logger.info(' Plotting...') @@ -661,7 +685,8 @@ def run_task(self): groupLink='transporttime', thumbnailDescription=thumbnailDescription, imageDescription=caption, - imageCaption=caption) + imageCaption=caption, + gallery=self.transportGroup) def _load_transport(self, config): """ @@ -670,15 +695,13 @@ def _load_transport(self, config): # Authors # ------- # Xylar Asay-Davis - + groupSuffix = self.transportGroup.replace(' ', '') baseDirectory = build_config_full_path( config, 'output', 'timeSeriesSubdirectory') startYear = config.getint('timeSeries', 'startYear') endYear = config.getint('timeSeries', 'endYear') - - inFileName = '{}/transport/transport_{:04d}-{:04d}.nc'.format( - baseDirectory, startYear, endYear) + inFileName = f'{baseDirectory}/transport/{groupSuffix}_{startYear:04d}-{endYear:04d}.nc' dsIn = xarray.open_dataset(inFileName) transport = dsIn.transport.isel(nTransects=self.transectIndex) diff --git a/mpas_analysis/polar_regions.cfg b/mpas_analysis/polar_regions.cfg index 4edb4b9db..f0b05f03a 100644 --- a/mpas_analysis/polar_regions.cfg +++ b/mpas_analysis/polar_regions.cfg @@ -40,7 +40,21 @@ normArgsResult = {'vmin': 33.8, 'vmax': 35.0} ## options related to plotting T/S diagrams of ocean regions # the names of region groups to plot, each with its own section below -regionGroups = ['Antarctic Regions', 'Arctic Ocean Regions', 'Ocean Basins'] +regionGroups = ['Ocean Basins', 'Arctic Ocean Regions', + 'Greenland Regions', 'Antarctic Regions'] + +[TSDiagramsForGreenlandRegions] +## options related to plotting T/S diagrams of Greenland regions + +# list of regions to plot or ['all'] for all regions in the masks file. +# See "regionNames" in the antarcticRegions masks file in +# regionMaskSubdirectory for details. 
+regionNames = ['all'] + +# The minimum and maximum depth over which fields are plotted, default is +# to take these values from the geojson feature's zmin and zmax properties. +zmin = -1000 +zmax = 0 [TSDiagramsForAntarcticRegions] ## options related to plotting T/S diagrams of Antarctic regions @@ -226,8 +240,46 @@ colorbarLevelsDifference = [-0.5, -0.2, -0.1, -0.05, -0.02, 0, 0.02, 0.05, ## options related to plotting vertical profiles of regional means (and ## variability) of 3D MPAS fields -regionGroups = ['Arctic Ocean Regions', 'Antarctic Regions'] +regionGroups = ['Arctic Ocean Regions', 'Greenland Regions', 'Antarctic Regions'] + + +[profilesGreenlandRegions] +## options related to plotting vertical profiles Greenland regions + + +# a list of dictionaries for each field to plot. The dictionary includes +# prefix (used for file names, task names and sections) as well as the mpas +# name of the field, units for colorbars and a the name as it should appear +# in figure titles and captions. 
+fields = + [{'prefix': 'potentialTemperature', + 'mpas': 'timeMonthly_avg_activeTracers_temperature', + 'units': r'$$\degree$$C', + 'titleName': 'Potential Temperature'}, + {'prefix': 'salinity', + 'mpas': 'timeMonthly_avg_activeTracers_salinity', + 'units': r'PSU', + 'titleName': 'Salinity'}, + {'prefix': 'potentialDensity', + 'mpas': 'timeMonthly_avg_potentialDensity', + 'units': r'kg m$$^{-3}$$', + 'titleName': 'Potential Density'}] + +# Times for comparison times (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, +# Nov, Dec, JFM, AMJ, JAS, OND, ANN) +seasons = ['ANN'] + +# minimum and maximum depth of profile plots, or empty for the full depth range +depthRange = [] + +# maximum depth of cells to be included in profile statistics +maxDepth = 600 +# a list of region names from the region masks file to plot +regionNames = ['all'] + +# web gallery options +profileGalleryGroup = Greenland Regional Profiles [profilesArcticOceanRegions] ## options related to plotting vertical profiles Antarctic regions @@ -258,6 +310,9 @@ seasons = ['ANN'] # minimum and maximum depth of profile plots, or empty for the full depth range depthRange = [] +# maximum depth of cells to be included in profile statistics +maxDepth = None + # a list of region names from the region masks file to plot regionNames = ['Baffin Bay', 'Barents Sea', 'Canada Basin', 'Greenland Sea', 'Irminger Sea', 'Kara Sea', 'Labrador Sea', 'Norwegian Sea', 'Arctic Ocean - no Barents, Kara Seas'] @@ -293,6 +348,9 @@ seasons = ['ANN'] # minimum and maximum depth of profile plots, or empty for the full depth range depthRange = [-600., 0.] 
+# maximum depth of cells to be included in profile statistics +maxDepth = None + # a list of region names from the region masks file to plot regionNames = ["Southern Ocean 60S", "Weddell Sea Shelf", "Weddell Sea Deep", "Bellingshausen Sea Shelf", @@ -310,7 +368,40 @@ profileGalleryGroup = Antarctic Regional Profiles ## regional means of 3D MPAS fields # the names of region groups to plot, each with its own section below -regionGroups = ['Arctic Ocean Regions', 'Antarctic Regions'] +regionGroups = ['Arctic Ocean Regions', 'Greenland Regions', 'Antarctic Regions'] + + +[hovmollerGreenlandRegions] +## options related to plotting Hovmoller diagrams of Greenland regions + +# a list of dictionaries for each field to plot. The dictionary includes +# prefix (used for file names, task names and sections) as well as the MPAS +# name of the field, units for colorbars and the name as it should appear +# in figure titles and captions. +fields = + [{'prefix': 'potentialTemperature', + 'mpas': 'timeMonthly_avg_activeTracers_temperature', + 'units': r'$$\degree$$C', + 'titleName': 'Potential Temperature'}, + {'prefix': 'salinity', + 'mpas': 'timeMonthly_avg_activeTracers_salinity', + 'units': r'PSU', + 'titleName': 'Salinity'}, + {'prefix': 'potentialDensity', + 'mpas': 'timeMonthly_avg_potentialDensity', + 'units': r'kg m$$^{-3}$$', + 'titleName': 'Potential Density'}] + +# a list of region names from the region masks file to plot +regionNames = ['all'] + +# whether to compute an anomaly with respect to the start of the time series +computeAnomaly = False + +# Number of points over which to compute moving average (e.g., for monthly +# output, movingAveragePoints=12 corresponds to a 12-month moving average +# window) +movingAveragePoints = 12 [hovmollerArcticOceanRegions] @@ -458,6 +549,9 @@ makeTables = True # ['all'] for all 106 ice shelves and regions. 
iceShelvesInTable = ['all'] +[timeSeriesTransport] +## options related to plotting time series of transport through transects +transportGroups = ['Transport Transects', 'Arctic Transport Transects'] [timeSeriesConservation] ## options related to producing time series plots from the conservation diff --git a/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py b/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py index b441de516..fde743ae1 100644 --- a/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py +++ b/mpas_analysis/shared/climatology/remap_mpas_climatology_subtask.py @@ -600,16 +600,16 @@ def _remap(self, inFileName, outFileName, remapper, comparisonGridName, parallel_exec = None if self.useNcremap: + basename, ext = os.path.splitext(outFileName) + ncremapFilename = f'{basename}_ncremap{ext}' remapper.remap_file(inFileName=inFileName, - outFileName=outFileName, + outFileName=ncremapFilename, overwrite=True, renormalize=renormalizationThreshold, logger=self.logger, parallel_exec=parallel_exec) - remappedClimatology = xr.open_dataset(outFileName) - remappedClimatology.load() - remappedClimatology.close() + remappedClimatology = xr.open_dataset(ncremapFilename) else: climatologyDataSet = xr.open_dataset(inFileName) diff --git a/mpas_analysis/shared/constants/constants.py b/mpas_analysis/shared/constants/constants.py index c6d96189b..d0a4d70ef 100644 --- a/mpas_analysis/shared/constants/constants.py +++ b/mpas_analysis/shared/constants/constants.py @@ -61,6 +61,12 @@ # small value to prevent division by zero eps = 1.E-10 +# density of reference seawater (kg/m^3) +rho_sw = 1026. + +# Heat capacity of seawater +cp_sw = 3.996e3 + # density of freshwater (kg/m^3) rho_fw = 1000. 
diff --git a/mpas_analysis/shared/io/mpas_reader.py b/mpas_analysis/shared/io/mpas_reader.py index 9cf0c8ee6..6d1d8c99a 100644 --- a/mpas_analysis/shared/io/mpas_reader.py +++ b/mpas_analysis/shared/io/mpas_reader.py @@ -194,6 +194,10 @@ def _parse_dataset_time(ds, inTimeVariableName, calendar, # convert to string timeStrings = [''.join(xtime.astype('U')).strip() for xtime in timeVar.values] + for i, timeString in enumerate(timeStrings): + if timeString == '0000-01-15_00:00:00' and \ + i + 1 < len(timeStrings): + timeStrings[i] = f'{timeStrings[i + 1][:7]}-01_00:00:00' days = string_to_days_since_date(dateString=timeStrings, referenceDate=referenceDate, calendar=calendar) diff --git a/mpas_analysis/shared/io/utility.py b/mpas_analysis/shared/io/utility.py index cadb72ce4..9c99c0f49 100644 --- a/mpas_analysis/shared/io/utility.py +++ b/mpas_analysis/shared/io/utility.py @@ -353,7 +353,7 @@ def decode_strings(da): # ------- # Xylar Asay-Davis - if da.dtype.type is numpy.string_: + if da.dtype.type is numpy.bytes_: strings = [bytes.decode(name) for name in da.values] else: strings = [name for name in da.values] diff --git a/mpas_analysis/shared/io/write_netcdf.py b/mpas_analysis/shared/io/write_netcdf.py index df6ff7de9..99079083a 100644 --- a/mpas_analysis/shared/io/write_netcdf.py +++ b/mpas_analysis/shared/io/write_netcdf.py @@ -53,7 +53,7 @@ def write_netcdf_with_fill(ds, fileName, fillValues=netCDF4.default_fillvals): encodingDict[variableName] = {'_FillValue': None} # make strings write as unicode instead - if dtype.type is numpy.string_: + if dtype.type is numpy.bytes_: encodingDict[variableName] = {'dtype': str} ds.to_netcdf(fileName, encoding=encodingDict) diff --git a/mpas_analysis/shared/plot/colormap.py b/mpas_analysis/shared/plot/colormap.py index c0c4acb03..c091f8967 100644 --- a/mpas_analysis/shared/plot/colormap.py +++ b/mpas_analysis/shared/plot/colormap.py @@ -296,7 +296,7 @@ def register_custom_colormaps(): name = 'white_cmo_deep' # modify cmo.deep 
to start at white - colors2 = plt.cm.get_cmap('cmo.deep')(np.linspace(0, 1, 224)) + colors2 = plt.get_cmap('cmo.deep')(np.linspace(0, 1, 224)) colorCount = 32 colors1 = np.ones((colorCount, 4), float) x = np.linspace(0., 1., colorCount+1)[0:-1] diff --git a/mpas_analysis/shared/timekeeping/utility.py b/mpas_analysis/shared/timekeeping/utility.py index f526001e2..41569a016 100644 --- a/mpas_analysis/shared/timekeeping/utility.py +++ b/mpas_analysis/shared/timekeeping/utility.py @@ -58,7 +58,7 @@ def get_simulation_start_time(streams): ds = xarray.open_dataset(restartFile) da = ds.simulationStartTime - if da.dtype.type is numpy.string_: + if da.dtype.type is numpy.bytes_: simulationStartTime = bytes.decode(da.values.tobytes()) else: simulationStartTime = da.values.tobytes() @@ -108,6 +108,8 @@ def string_to_datetime(dateString): (year, month, day, hour, minute, second) = \ _parse_date_string(dateString, isInterval=False) + if year == 0: + year = 1 return datetime.datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second) diff --git a/setup.py b/setup.py index 5b0a84a7a..d2cd135ff 100755 --- a/setup.py +++ b/setup.py @@ -16,15 +16,14 @@ install_requires = \ - ['bottleneck', - 'cartopy>=0.18.0', + [ 'cartopy>=0.18.0', 'cmocean', 'dask', 'gsw', 'lxml', - 'matplotlib >=3.6.0,!=3.7.2', + 'matplotlib >=3.9.0', 'netcdf4', - 'numpy', + 'numpy >=2.0,<3.0', 'pandas', 'pillow >=10.0.0,<11.0.0', 'progressbar2', @@ -59,10 +58,10 @@ 'Intended Audience :: Science/Research', 'Programming Language :: Python', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', 'Topic :: Scientific/Engineering', ], packages=find_packages(), diff --git a/suite/setup.py b/suite/setup.py index a847c159e..da097109d 100755 --- a/suite/setup.py +++ b/suite/setup.py @@ -54,7 +54,7 @@ def 
main(): simulation = '20200305.A_WCYCL1850.ne4_oQU480.anvil' mesh = 'QU480' else: - simulation = '20230406.GMPAS-IAF-ISMF.T62_oQU240wLI.chrysalis' + simulation = '20240718.GMPAS-IAF-PISMF.T62_oQU240wLI.chrysalis' mesh = 'oQU240wLI' if machine in ['anvil', 'chrysalis']: input_base = '/lcrc/group/e3sm/public_html/diagnostics/mpas_analysis/example_simulations'