diff --git a/.github/actions/testing-setup/action.yml b/.github/actions/testing-setup/action.yml new file mode 100644 index 0000000000..c47270af0d --- /dev/null +++ b/.github/actions/testing-setup/action.yml @@ -0,0 +1,77 @@ +name: 'Build-.testing-prerequisites' +description: 'Build pre-requisites for .testing including FMS and a symmetric MOM6 executable' +inputs: + build_symmetric: + description: 'If true, will build the symmetric MOM6 executable' + required: false + default: 'true' + install_python: + description: 'If true, will install the local python env needed for .testing' + required: false + default: 'true' +runs: + using: 'composite' + steps: + - name: Git info + shell: bash + run: | + echo "::group::Git commit info" + echo "git log:" + git log | head -60 + echo "::endgroup::" + + - name: Env + shell: bash + run: | + echo "::group::Environment" + env + echo "::endgroup::" + + - name: Install needed packages for compiling + shell: bash + run: | + echo "::group::Install linux packages" + sudo apt-get update + sudo apt-get install netcdf-bin libnetcdf-dev libnetcdff-dev mpich libmpich-dev + echo "::endgroup::" + + - name: Compile FMS library + shell: bash + run: | + echo "::group::Compile FMS library" + cd .testing + make deps/lib/libFMS.a -s -j + echo "::endgroup::" + + - name: Store compiler flags used in Makefile + shell: bash + run: | + echo "::group::config.mk" + cd .testing + echo "FCFLAGS_DEBUG=-g -O0 -Wextra -Wno-compare-reals -fbacktrace -ffpe-trap=invalid,zero,overflow -fcheck=bounds" >> config.mk + echo "FCFLAGS_REPRO=-g -O2 -fbacktrace" >> config.mk + echo "FCFLAGS_INIT=-finit-real=snan -finit-integer=2147483647 -finit-derived" >> config.mk + echo "FCFLAGS_COVERAGE=--coverage" >> config.mk + cat config.mk + echo "::endgroup::" + + - name: Compile MOM6 in symmetric memory mode + shell: bash + run: | + echo "::group::Compile MOM6 in symmetric memory mode" + cd .testing + test ${{ inputs.build_symmetric }} == true && make build/symmetric/MOM6 -j + echo "::endgroup::" + + - name: Install local python venv for generating input data + shell: bash + run: | + echo "::group::Create local python env for input data generation" + cd .testing + test ${{ inputs.install_python }} == true && make work/local-env + echo "::endgroup::" + + - name: Set flags + shell: bash + run: | + echo "TIMEFORMAT=... 
completed in %lR (user: %lU, sys: %lS)" >> $GITHUB_ENV diff --git a/.github/workflows/coupled-api.yml b/.github/workflows/coupled-api.yml new file mode 100644 index 0000000000..86d7262548 --- /dev/null +++ b/.github/workflows/coupled-api.yml @@ -0,0 +1,33 @@ +name: API for coupled drivers + +on: [push, pull_request] + +jobs: + test-top-api: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + with: + build_symmetric: 'false' + install_python: 'false' + + - name: Compile MOM6 for the GFDL coupled driver + shell: bash + run: make check_mom6_api_coupled -j + + - name: Compile MOM6 for the NUOPC driver + shell: bash + run: make check_mom6_api_nuopc -j + + - name: Compile MOM6 for the MCT driver + shell: bash + run: make check_mom6_api_mct -j diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml new file mode 100644 index 0000000000..60b85e412b --- /dev/null +++ b/.github/workflows/coverage.yml @@ -0,0 +1,24 @@ +name: Code coverage + +on: [push, pull_request] + +jobs: + build-test-nans: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + env: + REPORT_COVERAGE: true + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + + - name: Run and post coverage + run: make run.symmetric -k -s diff --git a/.github/workflows/documentation-and-style.yml b/.github/workflows/documentation-and-style.yml new file mode 100644 index 0000000000..c171c538d5 --- /dev/null +++ b/.github/workflows/documentation-and-style.yml @@ -0,0 +1,39 @@ +name: Doxygen and style + +on: [push, pull_request] + +jobs: + doxygen: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - name: Check white space (non-blocking) + run: | + ./.testing/trailer.py -e TEOS10 -l 120 src config_src 2>&1 | tee style_errors + continue-on-error: true + + - name: Install packages used when generating documentation + run: | + sudo apt-get update + sudo apt-get install python3-sphinx python3-lxml perl + sudo apt-get install texlive-binaries texlive-base bibtool tex-common texlive-bibtex-extra + sudo apt-get install graphviz + + - name: Build doxygen HTML + run: | + cd docs + perl -e 'print "perl version $^V" . "\n"' + mkdir _build && make nortd DOXYGEN_RELEASE=Release_1_8_13 UPDATEHTMLEQS=Y + cat _build/doxygen_warn_nortd_log.txt + + - name: Report doxygen or style errors + run: | + grep "warning:" docs/_build/doxygen_warn_nortd_log.txt | grep -v "as part of a" | tee doxy_errors + cat style_errors doxy_errors > all_errors + cat all_errors + test ! 
-s all_errors diff --git a/.github/workflows/expression.yml b/.github/workflows/expression.yml new file mode 100644 index 0000000000..c504e6c15a --- /dev/null +++ b/.github/workflows/expression.yml @@ -0,0 +1,27 @@ +name: Expression verification + +on: [push, pull_request] + +jobs: + test-repro-and-dims: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + + - name: Compile MOM6 using repro optimization + run: make build/repro/MOM6 -j + + - name: Create validation data + run: make run.symmetric -k -s + + - name: Run tests + run: make test.repros test.dims -k -s diff --git a/.github/workflows/other.yml b/.github/workflows/other.yml new file mode 100644 index 0000000000..3406fa9bc8 --- /dev/null +++ b/.github/workflows/other.yml @@ -0,0 +1,27 @@ +name: OpenMP and Restart verification + +on: [push, pull_request] + +jobs: + test-openmp-nan-restarts: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + + - name: Compile with openMP + run: make build/openmp/MOM6 -j + + - name: Create validation data + run: make run.symmetric -k -s + + - name: Run tests + run: make test.openmps test.nans test.restarts -k -s diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml new file mode 100644 index 0000000000..7dd1f3c703 --- /dev/null +++ b/.github/workflows/regression.yml @@ -0,0 +1,27 @@ +name: Regression + +on: [pull_request] + +jobs: + build-test-regression: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + + - name: Compile reference model + run: make build.regressions MOM_TARGET_SLUG=$GITHUB_REPOSITORY MOM_TARGET_LOCAL_BRANCH=$GITHUB_BASE_REF DO_REGRESSION_TESTS=true -j + + - name: Create validation data + run: make run.symmetric -k -s + + - name: Regression test + run: make test.regressions DO_REGRESSION_TESTS=true -k -s diff --git a/.github/workflows/stencil.yml b/.github/workflows/stencil.yml new file mode 100644 index 0000000000..20081747cc --- /dev/null +++ b/.github/workflows/stencil.yml @@ -0,0 +1,27 @@ +name: Stencil related verification + +on: [push, pull_request] + +jobs: + test-symmetric-layout-rotation: + + runs-on: ubuntu-latest + defaults: + run: + working-directory: .testing + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + + - uses: ./.github/actions/testing-setup + + - name: Compile MOM6 in asymmetric memory mode + run: make build/asymmetric/MOM6 -j + + - name: Create validation data + run: make run.symmetric -k -s + + - name: Run tests + run: make test.grids test.layouts test.rotations -k -s diff --git a/.gitignore b/.gitignore index ccaecbbead..25f7524d1c 100644 --- a/.gitignore +++ b/.gitignore @@ -4,13 +4,7 @@ html -# Build output -*.o -*.mod -MOM6 - - -# Autoconf +# Autoconf output aclocal.m4 autom4te.cache/ config.log diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 1622ae9886..befac14642 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -31,7 +31,7 @@ setup: # Clone regressions directory - git clone --recursive http://gitlab.gfdl.noaa.gov/ogrp/Gaea-stats-MOM6-examples.git tests && cd tests # Install / update testing scripts - - git clone https://github.com/adcroft/MRS.git MRS + - git clone -b 
new-code-struct https://github.com/adcroft/MRS.git MRS # Update MOM6-examples and submodules - (cd MOM6-examples && git checkout . && git checkout dev/gfdl && git pull && git submodule init && git submodule update) - (cd MOM6-examples/src/MOM6 && git submodule update) @@ -60,7 +60,7 @@ gnu:ocean-only-nolibs: - time tar zxf $CACHE_DIR/tests_$CI_PIPELINE_ID.tgz && cd tests - make -f MRS/Makefile.build build/gnu/env && cd build/gnu # mkdir -p build/gnu/repro/symmetric_dynamic/ocean_only && cd build/gnu/repro/symmetric_dynamic/ocean_only - - ../../MOM6-examples/src/mkmf/bin/list_paths -l ../../../config_src/{solo_driver,dynamic_symmetric,ext*} ../../../src ../../MOM6-examples/src/FMS + - ../../MOM6-examples/src/mkmf/bin/list_paths -l ../../../config_src/{drivers/solo_driver,memory/dynamic_symmetric,infra/FMS1,ext*} ../../../src ../../MOM6-examples/src/FMS - sed -i '/FMS\/.*\/test_/d' path_names - ../../MOM6-examples/src/mkmf/bin/mkmf -t ../../MOM6-examples/src/mkmf/templates/ncrc-gnu.mk -p MOM6 -c"-Duse_libMPI -Duse_netCDF" path_names - time (source ./env ; make NETCDF=3 REPRO=1 MOM6 -s -j) @@ -73,7 +73,7 @@ gnu:ice-ocean-nolibs: - time tar zxf $CACHE_DIR/tests_$CI_PIPELINE_ID.tgz && cd tests - make -f MRS/Makefile.build build/gnu/env && cd build/gnu # mkdir -p build/gnu/repro/symmetric_dynamic/ocean_only && cd build/gnu/repro/symmetric_dynamic/ocean_only - - ../../MOM6-examples/src/mkmf/bin/list_paths -l ../../../config_src/{coupled_driver,dynamic,ext*} ../../../src ../../MOM6-examples/src/{FMS,coupler,SIS2,icebergs,ice_param,land_null,atmos_null} + - ../../MOM6-examples/src/mkmf/bin/list_paths -l ../../../config_src/{drivers/FMS_cap,memory/dynamic_nonsymmetric,infra/FMS1,ext*} ../../../src ../../MOM6-examples/src/{FMS,coupler,SIS2,icebergs,ice_param,land_null,atmos_null} - sed -i '/FMS\/.*\/test_/d' path_names - ../../MOM6-examples/src/mkmf/bin/mkmf -t ../../MOM6-examples/src/mkmf/templates/ncrc-gnu.mk -p MOM6 -c"-Duse_libMPI -Duse_netCDF -D_USE_LEGACY_LAND_ -Duse_AM3_physics" path_names - time (source ./env ; make NETCDF=3 REPRO=1 MOM6 -s -j) @@ -117,11 +117,36 @@ run: - time tar zxf $CACHE_DIR/build-pgi-repro-$CI_PIPELINE_ID.tgz # time tar zxf $CACHE_DIR/build-gnu-debug-$CI_PIPELINE_ID.tgz - (echo '#!/bin/tcsh';echo 'make -f MRS/Makefile.tests all') > job.sh - - sbatch --clusters=c3,c4 --nodes=29 --time=0:34:00 --account=gfdl_o --qos=debug --job-name=mom6_regressions --output=log.$CI_PIPELINE_ID --wait job.sh + - sbatch --clusters=c3,c4 --nodes=29 --time=0:34:00 --account=gfdl_o --qos=debug --job-name=mom6_regressions --output=log.$CI_PIPELINE_ID --wait job.sh || MJOB_RETURN_STATE=Fail - cat log.$CI_PIPELINE_ID + - test -z "$MJOB_RETURN_STATE" - test -f restart_results_gnu.tar.gz - time tar zvcf $CACHE_DIR/results-$CI_PIPELINE_ID.tgz *.tar.gz +gnu.testing: + stage: run + tags: + - ncrc4 + script: + - cd .testing + - module unload PrgEnv-pgi PrgEnv-intel PrgEnv-gnu darshan ; module load PrgEnv-gnu ; module unload netcdf gcc ; module load gcc/7.3.0 cray-hdf5 cray-netcdf + - make work/local-env + - make -s -j + - (echo '#!/bin/bash';echo '. 
./work/local-env/bin/activate';echo 'make MPIRUN="srun -mblock --exclusive" test -s -j') > job.sh + - sbatch --clusters=c3,c4 --nodes=5 --time=0:05:00 --account=gfdl_o --qos=debug --job-name=MOM6.gnu.testing --output=log.$CI_PIPELINE_ID --wait job.sh || cat log.$CI_PIPELINE_ID && make test + +intel.testing: + stage: run + tags: + - ncrc4 + script: + - cd .testing + - module unload PrgEnv-pgi PrgEnv-intel PrgEnv-gnu darshan; module load PrgEnv-intel; module unload netcdf intel; module load intel/18.0.6.288 cray-hdf5 cray-netcdf + - make work/local-env + - make -s -j + - (echo '#!/bin/bash';echo '. ./work/local-env/bin/activate';echo 'make MPIRUN="srun -mblock --exclusive" test -s -j') > job.sh + - sbatch --clusters=c3,c4 --nodes=5 --time=0:05:00 --account=gfdl_o --qos=debug --job-name=MOM6.gnu.testing --output=log.$CI_PIPELINE_ID --wait job.sh || cat log.$CI_PIPELINE_ID && make test + # Tests gnu:non-symmetric: stage: tests diff --git a/.readthedocs.yml b/.readthedocs.yml index b95a9b901f..f7ad4421b4 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,11 +1,16 @@ -# don't build extra formats (like HTML zip) -formats: - - none +version: 2 -# path to pip requirements file to bring in -# doxygen extensions -requirements_file: docs/requirements.txt +# Extra formats +# PDF generation is failing for now; disabled on 2020-12-02 +#formats: +# - pdf + +# Build documentation +sphinx: + configuration: docs/conf.py python: # make sure we're using Python 3 - version: 3.5 + version: 3 + install: + - requirements: docs/requirements.txt diff --git a/.testing/Makefile b/.testing/Makefile index 4b3dfdefb8..02f6557c09 100644 --- a/.testing/Makefile +++ b/.testing/Makefile @@ -1,20 +1,80 @@ +# MOM6 Test suite Makefile +# +# Usage: +# make -j +# Build the FMS library and test executables +# +# make -j test +# Run the test suite, defined in the `tc` directores. +# +# make clean +# Wipe the MOM6 test executables +# (NOTE: This does not delete FMS in the `deps`) +# +# +# Configuration: +# These settings can be provided as either command-line flags, or saved in a +# `config.mk` file. +# +# Test suite configuration: +# +# MPIRUN MPI job launcher (mpirun, srun, etc) +# DO_REPRO_TESTS Enable production ("repro") testing equivalence +# DO_REGRESSION_TESTS: Enable regression tests (usually dev/gfdl) +# REPORT_COVERAGE Enable code coverage and report to codecov +# +# Compiler configuration: +# (NOTE: These are environment variables and may be inherited from a shell.) +# +# CC C compiler +# MPICC MPI C compiler +# FC Fortran compiler +# MPIFC MPI Fortran compiler +# +# Build configuration: +# FCFLAGS_DEBUG Testing ("debug") compiler flags +# FCFLAGS_REPRO Production ("repro") compiler flags +# FCFLAGS_INIT Variable initialization flags +# FCFLAGS_COVERAGE Code coverage flags +# +# Regression repository ("target") configuration: +# (NOTE: These would typically be configured by a CI such as Travis.) 
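
As a concrete illustration of the configuration knobs documented in this Makefile header, a minimal `config.mk` can be dropped into `.testing` before building; the flag values below are only illustrative (they echo the GCC flags that the `testing-setup` action writes), and `srun` stands in for whatever MPI launcher a site provides:

```
cd .testing
cat >> config.mk <<'EOF'
MPIRUN = srun
DO_REPRO_TESTS = true
FCFLAGS_DEBUG = -g -O0 -Wextra -fbacktrace -fcheck=bounds
FCFLAGS_REPRO = -g -O2 -fbacktrace
EOF
make -j            # build libFMS.a and the MOM6 test executables
make -j test       # run the suite over the tc* configurations
make test.summary  # condensed pass/fail report, as used in .travis.yml
```
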
+# +# MOM_TARGET_SLUG URL slug (minus domain) of the target repo +# MOM_TARGET_URL Full URL of the target repo +# MOM_TARGET_LOCAL_BRANCH Target branch name +# +#---- + +# TODO: Bourne shell compatibility SHELL = bash + +# No implicit rules .SUFFIXES: +# No implicit variables +MAKEFLAGS += -R + # User-defined configuration -include config.mk # Set the MPI launcher here +# TODO: This needs more automated configuration MPIRUN ?= mpirun -# Default target compiler flags +# Generic compiler variables are pass through to the builds +export CC +export MPICC +export FC +export MPIFC + +# Builds are distinguished by FCFLAGS # NOTE: FMS will be built using FCFLAGS_DEBUG FCFLAGS_DEBUG ?= -g -O0 FCFLAGS_REPRO ?= -g -O2 FCFLAGS_INIT ?= FCFLAGS_COVERAGE ?= # Additional notes: -# # - The default values are simple, minimalist flags, supported by nearly all # compilers which are comparable to GFDL's canonical DEBUG and REPRO builds. # @@ -24,11 +84,17 @@ FCFLAGS_COVERAGE ?= # - FMS cannot be built with the same aggressive initialization flags as MOM6, # so FCFLAGS_INIT is used to provide additional MOM6 configuration. +# User-defined LDFLAGS (applied to all builds and FMS) +LDFLAGS_USER ?= + # Set to `true` to require identical results from DEBUG and REPRO builds +# NOTE: Many compilers (Intel, GCC on ARM64) do not yet produce identical +# results across DEBUG and REPRO builds (as defined below), so we disable on +# default. DO_REPRO_TESTS ?= -# Many compilers (Intel, GCC on ARM64) do not yet produce identical results -# across DEBUG and REPRO builds (as defined below), so we disable on default. +# Time measurement (configurable by the CI) +TIME ?= time #--- # Dependencies @@ -60,7 +126,6 @@ endif # DO_REGRESSION_TESTS: true if $(TRAVIS_PULL_REQUEST) is a PR number # MOM_TARGET_SLUG: TRAVIS_REPO_SLUG # MOM_TARGET_LOCAL_BRANCH: TRAVIS_BRANCH - # These are set to true by our Travis configuration if testing a pull request DO_REGRESSION_TESTS ?= REPORT_COVERAGE ?= @@ -91,10 +156,12 @@ SOURCE = \ $(foreach ext,F90 inc c h,$(wildcard $(1)/*/*.$(ext) $(1)/*/*/*.$(ext))) MOM_SOURCE = $(call SOURCE,../src) \ - $(wildcard ../config_src/solo_driver/*.F90) \ + $(wildcard ../config_src/infra/FMS1/*.F90) \ + $(wildcard ../config_src/drivers/solo_driver/*.F90) \ $(wildcard ../config_src/ext*/*/*.F90) TARGET_SOURCE = $(call SOURCE,build/target_codebase/src) \ - $(wildcard build/target_codebase/config_src/solo_driver/*.F90) \ + $(wildcard build/target_codebase/config_src/infra/FMS1/*.F90) \ + $(wildcard build/target_codebase/config_src/drivers/solo_driver/*.F90) \ $(wildcard build/target_codebase/config_src/ext*/*.F90) FMS_SOURCE = $(call SOURCE,$(DEPS)/fms/src) @@ -153,8 +220,8 @@ REPRO_FCFLAGS := FCFLAGS="$(FCFLAGS_REPRO) $(FCFLAGS_FMS)" OPENMP_FCFLAGS := FCFLAGS="$(FCFLAGS_DEBUG) $(FCFLAGS_INIT) $(FCFLAGS_FMS)" TARGET_FCFLAGS := FCFLAGS="$(FCFLAGS_DEBUG) $(FCFLAGS_INIT) $(FCFLAGS_FMS)" -MOM_LDFLAGS := LDFLAGS="$(LDFLAGS_FMS)" -SYMMETRIC_LDFLAGS := LDFLAGS="$(COVERAGE) $(LDFLAGS_FMS)" +MOM_LDFLAGS := LDFLAGS="$(LDFLAGS_FMS) $(LDFLAGS_USER)" +SYMMETRIC_LDFLAGS := LDFLAGS="$(COVERAGE) $(LDFLAGS_FMS) $(LDFLAGS_USER)" # Environment variable configuration @@ -163,7 +230,9 @@ build/asymmetric/Makefile: MOM_ENV=$(PATH_FMS) $(ASYMMETRIC_FCFLAGS) $(MOM_LDFLA build/repro/Makefile: MOM_ENV=$(PATH_FMS) $(REPRO_FCFLAGS) $(MOM_LDFLAGS) build/openmp/Makefile: MOM_ENV=$(PATH_FMS) $(OPENMP_FCFLAGS) $(MOM_LDFLAGS) build/target/Makefile: MOM_ENV=$(PATH_FMS) $(TARGET_FCFLAGS) $(MOM_LDFLAGS) - +build/coupled/Makefile: MOM_ENV=$(PATH_FMS) 
$(SYMMETRIC_FCFLAGS) $(SYMMETRIC_LDFLAGS) +build/nuopc/Makefile: MOM_ENV=$(PATH_FMS) $(SYMMETRIC_FCFLAGS) $(SYMMETRIC_LDFLAGS) +build/mct/Makefile: MOM_ENV=$(PATH_FMS) $(SYMMETRIC_FCFLAGS) $(SYMMETRIC_LDFLAGS) # Configure script flags build/symmetric/Makefile: MOM_ACFLAGS= @@ -171,7 +240,9 @@ build/asymmetric/Makefile: MOM_ACFLAGS=--enable-asymmetric build/repro/Makefile: MOM_ACFLAGS= build/openmp/Makefile: MOM_ACFLAGS=--enable-openmp build/target/Makefile: MOM_ACFLAGS= - +build/coupled/Makefile: MOM_ACFLAGS=--with-driver=coupled_driver +build/nuopc/Makefile: MOM_ACFLAGS=--with-driver=nuopc_driver +build/mct/Makefile: MOM_ACFLAGS=--with-driver=mct_driver # Fetch regression target source code build/target/Makefile: | $(TARGET_CODEBASE) @@ -182,13 +253,13 @@ build/target/Makefile: | $(TARGET_CODEBASE) # Ideally we would want to re-run both Makefile and mkmf, but our mkmf call # is inside ./configure, so we must re-run ./configure as well. $(foreach b,$(filter-out target,$(BUILDS)),build/$(b)/Makefile): $(MOM_SOURCE) -build/target/configure: $(TARGET_SOURCE) +build/target_codebase/configure: $(TARGET_SOURCE) # Build MOM6 .PRECIOUS: $(foreach b,$(BUILDS),build/$(b)/MOM6) build/%/MOM6: build/%/Makefile - cd $(@D) && time $(MAKE) -j + cd $(@D) && $(TIME) $(MAKE) -j # Use autoconf to construct the Makefile for each target @@ -218,15 +289,15 @@ $(TARGET_CODEBASE)/ac/configure: $(TARGET_CODEBASE) $(TARGET_CODEBASE): git clone --recursive $(MOM_TARGET_URL) $@ - cd $@ && git checkout $(MOM_TARGET_BRANCH) - # Copy modern autoconf files to target? - mkdir -p $(TARGET_CODEBASE)/ac - cp -r ../ac/{configure.ac,Makefile.in,m4} $(TARGET_CODEBASE)/ac + cd $@ && git checkout --recurse-submodules $(MOM_TARGET_BRANCH) #--- # FMS +# Set up the FMS build environment variables +FMS_ENV = PATH="${PATH}:../../bin" FCFLAGS="$(FCFLAGS_DEBUG)" + # TODO: *.mod dependencies? $(DEPS)/lib/libFMS.a: $(DEPS)/fms/build/libFMS.a $(MAKE) -C $(DEPS) lib/libFMS.a @@ -235,7 +306,8 @@ $(DEPS)/fms/build/libFMS.a: $(DEPS)/fms/build/Makefile $(MAKE) -C $(DEPS) fms/build/libFMS.a $(DEPS)/fms/build/Makefile: $(DEPS)/fms/src/configure $(DEPS)/Makefile.fms.in $(MKMF) $(LIST_PATHS) - PATH_ENV="${PATH}:../../bin" FCFLAGS_ENV="$(FCFLAGS_DEBUG)" $(MAKE) -C $(DEPS) fms/build/Makefile + $(FMS_ENV) $(MAKE) -C $(DEPS) fms/build/Makefile + $(MAKE) -C $(DEPS) fms/build/Makefile $(DEPS)/Makefile.fms.in: ../ac/deps/Makefile.fms.in $(DEPS)/Makefile cp $< $(DEPS) @@ -261,8 +333,29 @@ $(DEPS)/Makefile: ../ac/deps/Makefile mkdir -p $(@D) cp $< $@ +#--- +# The following block does a non-library build of a coupled driver interface to MOM, along with everything below it. +# This simply checks that we have not broken the ability to compile. This is not a means to build a complete coupled executable. +# Todo: +# - avoid re-building FMS and MOM6 src by re-using existing object/mod files +# - use autoconf rather than mkmf templates +MK_TEMPLATE ?= ../../$(DEPS)/mkmf/templates/ncrc-gnu.mk +# NUOPC driver +build/nuopc/mom_ocean_model_nuopc.o: build/nuopc/Makefile + cd $(@D) && make $(@F) +check_mom6_api_nuopc: build/nuopc/mom_ocean_model_nuopc.o +# GFDL coupled driver +build/coupled/ocean_model_MOM.o: build/coupled/Makefile + cd $(@D) && make $(@F) +check_mom6_api_coupled: build/coupled/ocean_model_MOM.o +# MCT driver +build/mct/mom_ocean_model_mct.o: build/mct/Makefile + cd $(@D) && make $(@F) +check_mom6_api_mct: build/mct/mom_ocean_model_mct.o + #--- # Python preprocessing + # NOTE: Some less mature environments (e.g. 
Arm64 Ubuntu) require explicit # installation of numpy before netCDF4, as well as wheel and cython support. work/local-env: @@ -273,6 +366,7 @@ work/local-env: && pip3 install numpy \ && pip3 install netCDF4 + #--- # Testing @@ -304,16 +398,16 @@ run.nans: $(foreach c,$(CONFIGS),work/$(c)/nan/ocean.stats) run.openmp: $(foreach c,$(CONFIGS),work/$(c)/openmp/ocean.stats) # Color highlights for test results -RED=\033[0;31m -YELLOW=\033[0;33m -GREEN=\033[0;32m -MAGENTA=\033[0;35m -RESET=\033[0m +RED = \033[0;31m +YELLOW = \033[0;33m +GREEN = \033[0;32m +MAGENTA = \033[0;35m +RESET = \033[0m -DONE=${GREEN}DONE${RESET} -PASS=${GREEN}PASS${RESET} -WARN=${YELLOW}WARN${RESET} -FAIL=${RED}FAIL${RESET} +DONE = ${GREEN}DONE${RESET} +PASS = ${GREEN}PASS${RESET} +WARN = ${YELLOW}WARN${RESET} +FAIL = ${RED}FAIL${RESET} # Comparison rules # $(1): Test type (grid, layout, &c.) @@ -321,9 +415,10 @@ FAIL=${RED}FAIL${RESET} define CMP_RULE .PRECIOUS: $(foreach b,$(2),work/%/$(b)/ocean.stats) %.$(1): $(foreach b,$(2),work/%/$(b)/ocean.stats) + @test "$$(shell ls -A results/$$* 2>/dev/null)" || rm -rf results/$$* @cmp $$^ || !( \ mkdir -p results/$$*; \ - (diff $$^ | tee results/$$*/ocean.stats.$(1).diff | head) ; \ + (diff $$^ | tee results/$$*/ocean.stats.$(1).diff | head -n 20) ; \ echo -e "$(FAIL): Solutions $$*.$(1) have changed." \ ) @echo -e "$(PASS): Solutions $$*.$(1) agree." @@ -332,7 +427,7 @@ define CMP_RULE %.$(1).diag: $(foreach b,$(2),work/%/$(b)/chksum_diag) @cmp $$^ || !( \ mkdir -p results/$$*; \ - (diff $$^ | tee results/$$*/chksum_diag.$(1).diff | head) ; \ + (diff $$^ | tee results/$$*/chksum_diag.$(1).diff | head -n 20) ; \ echo -e "$(FAIL): Diagnostics $$*.$(1).diag have changed." \ ) @echo -e "$(PASS): Diagnostics $$*.$(1).diag agree." @@ -351,10 +446,11 @@ $(foreach d,$(DIMS),$(eval $(call CMP_RULE,dim.$(d),symmetric dim.$(d)))) # Restart tests only compare the final stat record .PRECIOUS: $(foreach b,symmetric restart target,work/%/$(b)/ocean.stats) %.restart: $(foreach b,symmetric restart,work/%/$(b)/ocean.stats) + @test "$(shell ls -A results/$* 2>/dev/null)" || rm -rf results/$* @cmp $(foreach f,$^,<(tr -s ' ' < $(f) | cut -d ' ' -f3- | tail -n 1)) \ || !( \ mkdir -p results/$*; \ - (diff $^ | tee results/$*/chksum_diag.restart.diff | head) ; \ + (diff $^ | tee results/$*/chksum_diag.restart.diff | head -n 20) ; \ echo -e "$(FAIL): Solutions $*.restart have changed." \ ) @echo -e "$(PASS): Solutions $*.restart agree." @@ -363,22 +459,26 @@ $(foreach d,$(DIMS),$(eval $(call CMP_RULE,dim.$(d),symmetric dim.$(d)))) # stats rule is unchanged, but we cannot use CMP_RULE to generate it. %.regression: $(foreach b,symmetric target,work/%/$(b)/ocean.stats) + @test "$(shell ls -A results/$* 2>/dev/null)" || rm -rf results/$* @cmp $^ || !( \ mkdir -p results/$*; \ - (diff $^ | tee results/$*/ocean.stats.regression.diff | head) ; \ + (diff $^ | tee results/$*/ocean.stats.regression.diff | head -n 20) ; \ echo -e "$(FAIL): Solutions $*.regression have changed." \ ) @echo -e "$(PASS): Solutions $*.regression agree." # Regression testing only checks for changes in existing diagnostics %.regression.diag: $(foreach b,symmetric target,work/%/$(b)/chksum_diag) - @! diff $^ | grep "^[<>]" | grep "^>" \ + @! diff $^ | grep "^[<>]" | grep "^>" > /dev/null \ || ! (\ mkdir -p results/$*; \ - (diff $^ | tee results/$*/chksum_diag.regression.diff | head) ; \ + (diff $^ | tee results/$*/chksum_diag.regression.diff | head -n 20) ; \ echo -e "$(FAIL): Diagnostics $*.regression.diag have changed." 
\ ) - @diff $^ || echo -e "$(WARN): New diagnostics in $<" + @cmp $^ || ( \ + diff $^ | head -n 20; \ + echo -e "$(WARN): New diagnostics in $<" \ + ) @echo -e "$(PASS): Diagnostics $*.regression.diag agree." @@ -407,12 +507,13 @@ work/%/$(1)/ocean.stats work/%/$(1)/chksum_diag: build/$(2)/MOM6 $(VENV_PATH) fi mkdir -p $$(@D)/RESTART echo -e "$(4)" > $$(@D)/MOM_override + rm -f results/$$*/std.$(1).{out,err} cd $$(@D) \ - && time $(5) $(MPIRUN) -n $(6) ../../../$$< 2> std.err > std.out \ + && $(TIME) $(5) $(MPIRUN) -n $(6) ../../../$$< 2> std.err > std.out \ || !( \ mkdir -p ../../../results/$$*/ ; \ - cat std.out | tee ../../../results/$$*/std.$(1).out | tail -20 ; \ - cat std.err | tee ../../../results/$$*/std.$(1).err | tail -20 ; \ + cat std.out | tee ../../../results/$$*/std.$(1).out | tail -n 20 ; \ + cat std.err | tee ../../../results/$$*/std.$(1).err | tail -n 20 ; \ rm ocean.stats chksum_diag ; \ echo -e "$(FAIL): $$*.$(1) failed at runtime." \ ) @@ -468,11 +569,13 @@ work/%/restart/ocean.stats: build/symmetric/MOM6 $(VENV_PATH) && printf -v timeunit_int "%.f" "$${timeunit}" \ && halfperiod=$$(printf "%.f" $$(bc <<< "scale=10; 0.5 * $${daymax} * $${timeunit_int}")) \ && printf "\n&ocean_solo_nml\n seconds = $${halfperiod}\n/\n" >> input.nml + # Remove any previous archived output + rm -f results/$*/std.restart{1,2}.{out,err} # Run the first half-period - cd $(@D) && time $(MPIRUN) -n 1 ../../../$< 2> std1.err > std1.out \ + cd $(@D) && $(TIME) $(MPIRUN) -n 1 ../../../$< 2> std1.err > std1.out \ || !( \ - cat std1.out | tee ../../../results/$*/std.restart1.out | tail ; \ - cat std1.err | tee ../../../results/$*/std.restart1.err | tail ; \ + cat std1.out | tee ../../../results/$*/std.restart1.out | tail -n 20 ; \ + cat std1.err | tee ../../../results/$*/std.restart1.err | tail -n 20 ; \ echo -e "$(FAIL): $*.restart failed at runtime." \ ) # Setup the next inputs @@ -480,10 +583,10 @@ work/%/restart/ocean.stats: build/symmetric/MOM6 $(VENV_PATH) mkdir $(@D)/RESTART cd $(@D) && sed -i -e "s/input_filename *= *'n'/input_filename = 'r'/g" input.nml # Run the second half-period - cd $(@D) && time $(MPIRUN) -n 1 ../../../$< 2> std2.err > std2.out \ + cd $(@D) && $(TIME) $(MPIRUN) -n 1 ../../../$< 2> std2.err > std2.out \ || !( \ - cat std2.out | tee ../../../results/$*/std.restart2.out | tail ; \ - cat std2.err | tee ../../../results/$*/std.restart2.err | tail ; \ + cat std2.out | tee ../../../results/$*/std.restart2.out | tail -n 20 ; \ + cat std2.err | tee ../../../results/$*/std.restart2.err | tail -n 20 ; \ echo -e "$(FAIL): $*.restart failed at runtime." \ ) diff --git a/.testing/README.md b/.testing/README.md index adc56e56cd..ef02bcfa09 100644 --- a/.testing/README.md +++ b/.testing/README.md @@ -229,6 +229,7 @@ configurations in the MOM6-examples repository. - `tc2`: An ALE configuration based on tc1 with tides - `tc2.a`: Use sigma, PPM_H4 and no tides - `tc3`: An open-boundary condition (OBC) test based on `circle_obcs` +- `tc4`: Sponges and initialization using I/O ## Code coverage diff --git a/.testing/trailer.py b/.testing/trailer.py index a483bf9995..64f016275f 100755 --- a/.testing/trailer.py +++ b/.testing/trailer.py @@ -1,100 +1,137 @@ #!/usr/bin/env python +"""Subroutines for Validating the whitespace of the source code.""" import argparse import os import re import sys + def parseCommandLine(): - """ - Parse the command line positional and optional arguments. - This is the highest level procedure invoked from the very end of the script. 
- """ + """Parse the command line positional and optional arguments. + + This is the highest level procedure invoked from the very end of the + script. + """ + # Arguments + parser = argparse.ArgumentParser( + description='trailer.py checks Fortran files for trailing white ' + 'space.', + epilog='Written by A.Adcroft, 2017.' + ) + parser.add_argument( + 'files_or_dirs', type=str, nargs='+', + metavar='FILE|DIR', + help='Fortran files or director in which to search for Fortran files ' + '(with .f, .f90, .F90 suffixes).''' + ) + parser.add_argument( + '-e', '--exclude_dir', type=str, action='append', + metavar='DIR', + help='''Exclude directories from search that end in DIR.''' + ) + parser.add_argument( + '-l', '--line_length', type=int, default=512, + help='''Maximum allowed length of a line.''' + ) + parser.add_argument( + '-s', '--source_line_length', type=int, default=132, + help='''Maximum allowed length of a source line excluding comments.''' + ) + parser.add_argument( + '-d', '--debug', action='store_true', + help='turn on debugging information.' + ) + args = parser.parse_args() - # Arguments - parser = argparse.ArgumentParser(description='''trailer.py checks Fortran files for trailing white space.''', - epilog='Written by A.Adcroft, 2017.') - parser.add_argument('files_or_dirs', type=str, nargs='+', - metavar='FILE|DIR', - help='''Fortran files or director in which to search for Fortran files (with .f, .f90, .F90 suffixes).''') - parser.add_argument('-e','--exclude_dir', type=str, action='append', - metavar='DIR', - help='''Exclude directories from search that end in DIR.''') - parser.add_argument('-l','--line_length', type=int, default=512, - help='''Maximum allowed length of a line.''') - parser.add_argument('-s','--source_line_length', type=int, default=132, - help='''Maximum allowed length of a source line excluding comments.''') - parser.add_argument('-d','--debug', action='store_true', - help='turn on debugging information.') - args = parser.parse_args() + global debug + debug = args.debug - global debug - debug = args.debug + main(args) - main(args) def main(args): - ''' - Does the actual work - ''' - if (debug): print(args) + """Do the actual work.""" + if (debug): + print(args) - # Process files_or_dirs argument into list of files - all_files = [] - for a in args.files_or_dirs: - if os.path.isfile(a): all_files.append(a) - elif os.path.isdir(a): - for d,s,files in os.walk(a): - ignore = False - if args.exclude_dir is not None: - for e in args.exclude_dir: - if e+'/' in d+'/': ignore = True - if not ignore: - for f in files: - _,ext = os.path.splitext(f) - if ext in ('.f','.F','.f90','.F90'): all_files.append( os.path.join(d,f) ) - else: raise Exception('Argument '+a+' is not a file or directory! Stopping.') - if (debug): print('Found: ',all_files) + # Process files_or_dirs argument into list of files + all_files = [] + for a in args.files_or_dirs: + if os.path.isfile(a): + all_files.append(a) + elif os.path.isdir(a): + for d, s, files in os.walk(a): + ignore = False + if args.exclude_dir is not None: + for e in args.exclude_dir: + if e+'/' in d+'/': + ignore = True + if not ignore: + for f in files: + _, ext = os.path.splitext(f) + if ext in ('.f', '.F', '.f90', '.F90'): + all_files.append(os.path.join(d, f)) + else: + raise Exception('Argument '+a+' is not a file or directory! 
' + 'Stopping.') + if (debug): + print('Found: ', all_files) + + # For each file, check for trailing white space + fail = False + for filename in all_files: + this = scan_file(filename, line_length=args.line_length, + source_line_length=args.source_line_length) + fail = fail or this + if fail: + sys.exit(1) - # For each file, check for trailing white space - fail = False - for filename in all_files: - this = scan_file(filename, line_length=args.line_length, source_line_length=args.source_line_length) - fail = fail or this - if fail: sys.exit(1) def scan_file(filename, line_length=512, source_line_length=132): - '''Scans file for trailing white space''' - def msg(filename,lineno,mesg,line=None): - if line is None: print('%s, line %i: %s'%(filename,lineno,mesg)) - else: print('%s, line %i: %s "%s"'%(filename,lineno,mesg,line)) - white_space_detected = False - tabs_space_detected = False - long_line_detected = False - with open(filename) as file: - trailing_space = re.compile(r'.* +$') - tabs = re.compile(r'.*\t.*') - lineno = 0 - for line in file.readlines(): - lineno += 1 - line = line.replace('\n','') - srcline = line.split('!', 1)[0] # Discard comments - if trailing_space.match(line) is not None: - if debug: print(filename,lineno,line,trailing_space.match(line)) - if len(line.strip())>0: msg(filename,lineno,'Trailing space detected',line) - else: msg(filename,lineno,'Blank line contains spaces') - white_space_detected = True - if tabs.match(line) is not None: - if len(line.strip())>0: msg(filename,lineno,'Tab detected',line) - else: msg(filename,lineno,'Blank line contains tabs') - tabs_space_detected = True - if len(line)>line_length: - if len(line.strip())>0: msg(filename,lineno,'Line length exceeded',line) - else: msg(filename,lineno,'Blank line exceeds line length limit') - long_line_detected = True - if len(srcline)>source_line_length: - msg(filename,lineno,'Non-comment line length exceeded',line) - return white_space_detected or tabs_space_detected or long_line_detected + """Scan file for trailing white space.""" + def msg(filename, lineno, mesg, line=None): + if line is None: + print('%s, line %i: %s' % (filename, lineno, mesg)) + else: + print('%s, line %i: %s "%s"' % (filename, lineno, mesg, line)) + white_space_detected = False + tabs_space_detected = False + long_line_detected = False + with open(filename) as file: + trailing_space = re.compile(r'.* +$') + tabs = re.compile(r'.*\t.*') + lineno = 0 + for line in file.readlines(): + lineno += 1 + line = line.replace('\n', '') + srcline = line.split('!', 1)[0] # Discard comments + if trailing_space.match(line) is not None: + if debug: + print(filename, lineno, line, trailing_space.match(line)) + if len(line.strip()) > 0: + msg(filename, lineno, 'Trailing space detected', line) + else: + msg(filename, lineno, 'Blank line contains spaces') + white_space_detected = True + if tabs.match(line) is not None: + if len(line.strip()) > 0: + msg(filename, lineno, 'Tab detected', line) + else: + msg(filename, lineno, 'Blank line contains tabs') + tabs_space_detected = True + if len(line) > line_length: + if len(line.strip()) > 0: + msg(filename, lineno, 'Line length exceeded', line) + else: + msg(filename, lineno, + 'Blank line exceeds line length limit') + long_line_detected = True + if len(srcline) > source_line_length: + msg(filename, lineno, 'Non-comment line length exceeded', line) + return white_space_detected or tabs_space_detected or long_line_detected + # Invoke parseCommandLine(), the top-level procedure -if __name__ == '__main__': 
parseCommandLine() +if __name__ == '__main__': + parseCommandLine() diff --git a/.travis.yml b/.travis.yml index 22c497f916..c34089ddf6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,10 +12,12 @@ addons: packages: - tcsh pkg-config netcdf-bin libnetcdf-dev libnetcdff-dev gfortran - mpich libmpich-dev - - doxygen graphviz flex bison cmake + - graphviz flex bison cmake - python-numpy python-netcdf4 - - python3 python3-dev python3-venv python3-pip + - python3 python3-dev python3-venv python3-pip python3-sphinx python3-lxml - bc + - perl + - texlive-binaries texlive-base bibtool tex-common texlive-bibtex-extra # Environment variables env: @@ -34,8 +36,12 @@ jobs: # Whitespace - ./.testing/trailer.py -e TEOS10 -l 120 src config_src # API Documentation - - cd docs && doxygen Doxyfile_nortd - - grep -v "config_src/solo_driver/coupler_types.F90" doxygen.log | tee doxy_errors + - perl -e 'print "perl version $^V" . "\n"' + - cd docs && mkdir _build && make nortd DOXYGEN_RELEASE=Release_1_8_13 UPDATEHTMLEQS=Y + # We can tighten up the warnings here. Math im image captions should only generate + # \f warnings. All other latex math should be double escaped (\\) like (\\Phi) for + # html image captions. + - grep "warning:" _build/doxygen_warn_nortd_log.txt | grep -v 'Illegal command f as part of a \\image' | tee doxy_errors - test ! -s doxy_errors - env: @@ -56,7 +62,6 @@ jobs: env: - JOB="x86 Regression testing" - DO_REGRESSION_TESTS=true - - REPORT_COVERAGE=true - MOM_TARGET_SLUG=${TRAVIS_REPO_SLUG} - MOM_TARGET_LOCAL_BRANCH=${TRAVIS_BRANCH} script: @@ -67,18 +72,6 @@ jobs: - time make -k -s test.regressions - make test.summary - - if: NOT type = pull_request - env: - - JOB="Coverage upload" - - REPORT_COVERAGE=true - - DO_REGRESSION_TESTS=false - script: - - cd .testing - - echo 'Build executables...' && echo -en 'travis_fold:start:script.1\\r' - - make build/symmetric/MOM6 - - echo -en 'travis_fold:end:script.1\\r' - - make -k -s run.symmetric - - arch: arm64 env: - JOB="ARM64 verification testing" diff --git a/ac/Makefile.in b/ac/Makefile.in index ce8173e6f1..7be6c5bf2b 100644 --- a/ac/Makefile.in +++ b/ac/Makefile.in @@ -10,19 +10,28 @@ # # The following variables are used by Makefiles generated by mkmf. # -# CC: C compiler -# CXX: C++ compiler -# FC: Fortran compiler (f77 and f90) -# LD: Linker +# CC C compiler +# CXX C++ compiler +# FC Fortran compiler (f77 and f90) +# LD Linker +# AR Archiver # -# CPPDEFS: Preprocessor macros -# CPPFLAGS: C preprocessing flags -# CXXFLAGS: C++ preprocessing flags -# FPPFLAGS: Fortran preprocessing flags +# CPPDEFS Preprocessor macros +# CPPFLAGS C preprocessing flags +# CXXFLAGS C++ preprocessing flags +# FPPFLAGS Fortran preprocessing flags +# +# CFLAGS C compiler flags +# FFLAGS Fortran compiler flags +# LDFLAGS Linker flags + libraries +# ARFLAGS Archiver flags +# +# OTHERFLAGS Additional flags for all languages (C, C++, Fortran) +# OTHER_CFLAGS Optional C flags +# OTHER_CXXFLAGS Optional C++ flags +# OTHER_FFLAGS Optional Fortran flags +# TMPFILES Placeholder for `make clean` deletion (as `make neat`). # -# CFLAGS: C compiler flags -# FFLAGS: Fortran compiler flags -# LDFLAGS: Linker flags + libraries # # NOTES: # - FPPFLAGS and FFLAGS always appear as a pair, and autoconf does not use @@ -33,13 +42,6 @@ # # - LDFLAGS does not distinguish between autoconf's LDFLAGS and LIBS. # It also places both after the executable rather than just LIBS. 
-# -# OTHERFLAGS: Additional flags for all languages (C, C++, Fortran) -# OTHER_CFLAGS: Optional C flags -# OTHER_CXXFLAGS: Optional C++ flags -# OTHER_FFLAGS: Optional Fortran flags -# -# TMPFILES: Placeholder for `make clean` deletion (as `make neat`). FC = @FC@ LD = @FC@ diff --git a/ac/README.md b/ac/README.md index d5a5310ab8..f50275c3a0 100644 --- a/ac/README.md +++ b/ac/README.md @@ -28,25 +28,31 @@ support. The following instructions will allow a new user to quickly create a MOM6 executable for ocean-only simulations. -Each set of instructions is meant to be run from the root directory of the -repository. +Before starting, ensure that all submodules have been updated. +``` +$ git submodule update --init --recursive +``` -A separate Makefile in `ac/deps/` is provided to gather and build any GFDL -dependencies. +Next, fetch the GFDL `mkmf` build tool and build the FMS framework library. + +For new users, a separate Makefile in `./ac/deps/` is provided for this step. ``` $ cd ac/deps $ make -j ``` -This will fetch the `mkmf` tool and build the FMS library. -To build MOM6, first generate the Autoconf `configure` script. +To build MOM6, first generate the Autoconf `configure` script in `./ac`. ``` +$ cd ../.. # Return to the root directory $ cd ac $ autoreconf ``` -Then select your build directory, e.g. `./build`, run the configure script, and -build the model. +Then select your build directory, run the configure script, and build the +model. + +The instructions below build the model in the `./build` directory. ``` +$ cd .. # Return to the root directory $ mkdir -p build $ cd build $ ../ac/configure @@ -54,31 +60,32 @@ $ make -j ``` This will create the MOM6 executable in the build directory. -This executable is only useable for ocean-only simulations, and cannot be used -for coupled modeling. It also requires the necessary experiment configuration -files, such as `input.nml` and `MOM_input`. For more information, consult the -[MOM6-examples wiki](https://github.com/NOAA-GFDL/MOM6-examples/wiki). +The steps above will produce an executable for ocean-only simulations, and +cannot be used for coupled modeling. It also requires the necessary experiment +configuration files, such as `input.nml` and `MOM_input`. For more +information, consult the [MOM6-examples +wiki](https://github.com/NOAA-GFDL/MOM6-examples/wiki). # Build rules The Makefile produced by Autoconf provides the following rules. -``make`` +`make` Build the MOM6 executable. -``make clean`` +`make clean` Delete the executable and any object and module files, but preserve the Autoconf output. -``make distclean`` +`make distclean` Delete all of the files above, as well as any files generated by `./configure`. Note that this will delete the Makefile containing this rule. -``make ac-clean`` +`make ac-clean` Delete all of the files above, including `./configure` and any other files created by `autoreconf`. As with `make distclean`, this will also delete the @@ -124,31 +131,24 @@ For the complete list of settings, run `./configure --help`. # GFDL Dependencies -This section briefly describes the management of GFDL dependencies `mkmf` and -FMS. - -The `configure` script will first check if the compiler and its configured -flags (`FCFLAGS`, `LDFLAGS`, etc.) can find `mkmf` and the FMS library. If -unavailable, then it will search in the local `ac/deps` library. If still -unavailable, then the build will abort. - -Running `make -C ac/deps` will ensure that the libraries are available. 
But if -the user wishes to target an external FMS library, then they should add the -appropriate `FCFLAGS` and `LDFLAGS` to find the library. - -Similar options are provided for `mkmf` with respect to `PATH`, although it -is usually not necessary to use an external `mkmf` script. - -Some configuration options are provided by the `ac/deps` Makefile: +This section briefly describes the management of the `mkmf` and FMS +dependencies. -`PATH_ENV` +When building MOM6, the `configure` script will first check if the compiler and +its configured flags (`FCFLAGS`, `LDFLAGS`, etc.) can locate `mkmf` and the FMS +library. If unavailable, then it will search in the local `ac/deps` library. +If still unavailable, then the build will abort. - This variable will override the value of `PATH` when building the dependencies. +The dependencies are not automatically provided in `ac/deps`. However, running +`make -C ac/deps` will fetch and build them. If the user wishes to target an +external FMS library or `mkmf` tools, then they should set `PATH`, `FCFLAGS` +and `LDFLAGS` so that `configure` can locate them. -`FCFLAGS_ENV` +Exported environment variables such as `FC` or `FCFLAGS` will be passed to the +corresponding `configure` scripts. - Used to override the default Autoconf flags, `-g -O2`. This is useful if, - for example, one wants to build with `-O0` to speed up the build time. +The following configuration options are also provided, which can be used to +specify the git URL and commit of the dependencies. `MKMF_URL` (*default:* https://github.com/NOAA-GFDL/mkmf.git) @@ -157,12 +157,6 @@ Some configuration options are provided by the `ac/deps` Makefile: `FMS_URL` (*default:* https://github.com/NOAA-GFDL/FMS.git) `FMS_COMMIT` (*default:* `2019.01.03`) - - These are used to specify where to check out the source code for each - respective project. - -Additional hooks for FMS builds do not yet exist, but can be added if -necessary. # Known issues / Future development diff --git a/ac/configure.ac b/ac/configure.ac index ee6b76dacb..6ff4ae5e8b 100644 --- a/ac/configure.ac +++ b/ac/configure.ac @@ -8,6 +8,8 @@ # - We would probably run this inside of a script to avoid the explicit # dependency on git. +AC_PREREQ([2.63]) + AC_INIT( [MOM6], [ ], @@ -40,12 +42,18 @@ srcdir=$srcdir/.. # Default to symmetric grid # NOTE: --enable is more properly used to add a feature, rather than to select # a compile-time mode, so this is not exactly being used as intended. -MEM_LAYOUT=${srcdir}/config_src/dynamic_symmetric +MEM_LAYOUT=${srcdir}/config_src/memory/dynamic_symmetric AC_ARG_ENABLE([asymmetric], AS_HELP_STRING([--enable-asymmetric], [Use the asymmetric grid])) AS_IF([test "$enable_asymmetric" = yes], - [MEM_LAYOUT=${srcdir}/config_src/dynamic]) + [MEM_LAYOUT=${srcdir}/config_src/memory/dynamic_nonsymmetric]) +# Default to solo_driver +DRIVER_DIR=${srcdir}/config_src/drivers/solo_driver +AC_ARG_WITH([driver], + AS_HELP_STRING([--with-driver=coupled_driver|solo_driver], [Select directory for driver source code])) +AS_IF([test "x$with_driver" != "x"], + [DRIVER_DIR=${srcdir}/config_src/drivers/${with_driver}]) # TODO: Rather than point to a pre-configured header file, autoconf could be # used to configure a header based on a template. 
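
The new `--with-driver` and `--enable-asymmetric` options can be exercised either through the `.testing` targets added earlier in this patch or by invoking `configure` by hand; a sketch, with illustrative build-directory names:

```
# API-only compile checks for the coupled caps (new .testing targets)
cd .testing
make check_mom6_api_coupled check_mom6_api_nuopc check_mom6_api_mct -j

# Hand-run configure for an asymmetric-memory, solo-driver build
cd ..
mkdir -p build/asymmetric && cd build/asymmetric
../../ac/configure --enable-asymmetric
make -j
```
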
@@ -80,18 +88,57 @@ AX_FC_CHECK_MODULE([mpi], # netCDF configuration -AC_PATH_PROG([NC_CONFIG], [nc-config]) -AS_IF([test -n "$NC_CONFIG"], - [CPPFLAGS="$CPPFLAGS -I$($NC_CONFIG --includedir)" - FCFLAGS="$FCFLAGS -I$($NC_CONFIG --includedir)" - LDFLAGS="$LDFLAGS -L$($NC_CONFIG --libdir)"], - [AC_MSG_ERROR([Could not find nc-config.])]) - -AX_FC_CHECK_MODULE([netcdf], - [], [AC_MSG_ERROR([Could not find FMS library.])]) -AX_FC_CHECK_LIB([netcdff], [nf_create], [netcdf], - [], [AC_MSG_ERROR([Could not link netcdff library.])] -) + +# Search for the Fortran netCDF module, fallback to nf-config. +AX_FC_CHECK_MODULE([netcdf], [], [ + AS_UNSET([ax_fc_cv_mod_netcdf]) + AC_PATH_PROG([NF_CONFIG], [nf-config]) + AS_IF([test -n "$NF_CONFIG"], [ + AC_SUBST([FCFLAGS], ["$FCFLAGS -I$($NF_CONFIG --includedir)"]) + ], [AC_MSG_ERROR([Could not find nf-config.])] + ) + AX_FC_CHECK_MODULE([netcdf], [], [ + AC_MSG_ERROR([Could not find netcdf module.]) + ]) +]) + +# FMS may invoke netCDF C calls, so we link to libnetcdf. +AC_LANG_PUSH([C]) +AC_CHECK_LIB([netcdf], [nc_create], [], [ + AS_UNSET([ac_cv_lib_netcdf_nc_create]) + AC_PATH_PROG([NC_CONFIG], [nc-config]) + AS_IF([test -n "$NC_CONFIG"], [ + AC_SUBST([LDFLAGS], + ["$LDFLAGS -L$($NC_CONFIG --libdir)"] + ) + ], [AC_MSG_ERROR([Could not find nc-config.])] + ) + AC_CHECK_LIB([netcdf], [nc_create], [], [ + AC_MSG_ERROR([Could not find libnetcdf.]) + ]) +]) +AC_LANG_POP([C]) + +# NOTE: We test for nf_create, rather than nf90_create, because AX_FC_CHECK_LIB +# is currently not yet able to properly probe inside modules. +# Testing of the nf90_* functions will require a macro update. +# NOTE: nf-config does not have a --libdir flag, so we use --prefix and assume +# that libraries are in the $prefix/lib directory. + +# Link to Fortran netCDF library, netcdff +AX_FC_CHECK_LIB([netcdff], [nf_create], [], [], [ + AS_UNSET([ax_fc_cv_lib_netcdff_nf_create]) + AC_PATH_PROG([NF_CONFIG], [nf-config]) + AS_IF([test -n "$NF_CONFIG"], [ + AC_SUBST([LDFLAGS], + ["$LDFLAGS -L$($NF_CONFIG --prefix)/lib"] + ) + ], [AC_MSG_ERROR([Could not find nf-config.])] + ) + AX_FC_CHECK_LIB([netcdff], [nf_create], [], [], [ + AC_MSG_ERROR([Could not find libnetcdff.]) + ]) +]) # Force 8-byte reals @@ -102,11 +149,22 @@ AS_IF( # OpenMP configuration -AC_OPENMP + +# NOTE: AC_OPENMP fails on `Fortran` for Autoconf <2.69 due to a m4 bug. +# For older versions, we test against CC and use the result for FC. +m4_version_prereq([2.69], [AC_OPENMP], [ + AC_LANG_PUSH([C]) + AC_OPENMP + AC_LANG_POP([C]) + OPENMP_FCFLAGS="$OPENMP_CFLAGS" +]) + +# NOTE: Only apply OpenMP flags if explicitly enabled. AS_IF( - [test "$enable_openmp" = yes], - [FCFLAGS="$FCFLAGS $OPENMP_FCFLAGS" - LDFLAGS="$LDFLAGS $OPENMP_FCFLAGS"]) + [test "$enable_openmp" = yes], [ + FCFLAGS="$FCFLAGS $OPENMP_FCFLAGS" + LDFLAGS="$LDFLAGS $OPENMP_FCFLAGS" +]) # FMS support @@ -158,10 +216,11 @@ AS_IF([test -z "$MKMF"], [ AC_CONFIG_COMMANDS([path_names], [list_paths -l \ ${srcdir}/src \ - ${srcdir}/config_src/solo_driver \ + ${srcdir}/config_src/infra/FMS1 \ ${srcdir}/config_src/ext* \ + ${DRIVER_DIR} \ ${MEM_LAYOUT} -], [MEM_LAYOUT=$MEM_LAYOUT]) +], [MEM_LAYOUT=$MEM_LAYOUT DRIVER_DIR=$DRIVER_DIR]) AC_CONFIG_COMMANDS([Makefile.mkmf], diff --git a/ac/deps/Makefile b/ac/deps/Makefile index 91fe343047..0ed4fd19a7 100644 --- a/ac/deps/Makefile +++ b/ac/deps/Makefile @@ -1,22 +1,10 @@ SHELL = bash -.SUFFIXES: - -# FMS build configuration -PATH_ENV ?= -FCFLAGS_ENV ?= - -# Only set FCFLAGS if an argument is provided. 
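
When neither `nc-config` nor `nf-config` is on `PATH`, the fallback probes above cannot help, but the flags can be passed to `configure` directly; likewise, exported compiler variables now reach the dependency builds in place of the `PATH_ENV`/`FCFLAGS_ENV` knobs being removed from `ac/deps/Makefile`. A sketch, with `/opt/netcdf` standing in for a real install prefix:

```
# From a build directory, as in ac/README.md
../ac/configure FCFLAGS="-I/opt/netcdf/include" LDFLAGS="-L/opt/netcdf/lib"

# Exported variables are picked up by the dependency configure scripts
export FC=gfortran FCFLAGS="-g -O2"
make -C ac/deps -j
```
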
-FMS_FCFLAGS = -ifneq ($(FCFLAGS_ENV),) - FMS_FCFLAGS := FCFLAGS="$(FCFLAGS_ENV)" -endif +# Disable implicit rules +.SUFFIXES: -# Ditto for path -FMS_PATH = -ifneq ($(PATH_ENV),) - FMS_PATH := PATH="$(PATH_ENV)" -endif +# Disable implicit variables +MAKEFLAGS += -R # mkmf, list_paths (GFDL build toolchain) @@ -44,7 +32,6 @@ FMS_SOURCE = $(call SOURCE,fms/src) .PHONY: all all: bin/mkmf bin/list_paths lib/libFMS.a - #--- # mkmf checkout @@ -65,6 +52,7 @@ mkmf: # This is a flawed approach, since module files are untracked and could be # handled more safely, but this is adequate for now. + # TODO: track *.mod copy? lib/libFMS.a: fms/build/libFMS.a fms/build/Makefile mkdir -p {lib,include} @@ -76,13 +64,10 @@ fms/build/libFMS.a: fms/build/Makefile make -C fms/build libFMS.a -# TODO: Include FC, CC, CFLAGS? -fms/build/Makefile: FMS_ENV=$(FMS_PATH) $(FMS_FCFLAGS) - fms/build/Makefile: Makefile.fms.in fms/src/configure bin/mkmf bin/list_paths mkdir -p fms/build cp Makefile.fms.in fms/src/Makefile.in - cd $(@D) && $(FMS_ENV) ../src/configure --srcdir=../src + cd $(@D) && ../src/configure --srcdir=../src # TODO: Track m4 macros? @@ -100,7 +85,6 @@ fms/src: clean: rm -rf fms/build lib include bin - .PHONY: distclean distclean: clean rm -rf fms mkmf diff --git a/ac/deps/Makefile.fms.in b/ac/deps/Makefile.fms.in index 694ad8e0b0..0286d94b58 100644 --- a/ac/deps/Makefile.fms.in +++ b/ac/deps/Makefile.fms.in @@ -1,46 +1,58 @@ -# Makefile template for MOM6 +# Makefile template for autoconf builds using mkmf # -# Previously this would have been generated by mkmf using a template file. +# Compiler flags are configured by autoconf's configure script. # -# The proposed autoconf build inverts this approach by constructing the -# information previously stored in the mkmf template, such as compiler names -# and flags, and importing the un-templated mkmf output for its rules and -# dependencies. +# Source code dependencies are configured by mkmf and list_paths, specified in +# the `Makefile.mkmf` file. # -# While this approach does not eliminate our dependency on mkmf, it does -# promises to eliminate our reliance on platform-specific templates, and -# instead allows us to provide a configure script for determining our compilers -# and flags. As a last resort, we provide hooks to override such settings. - -# NOTE: mkmf conventions are close, but not identical, to autoconf. -# -# CC: C compiler -# CXX: C++ compiler -# FC: Fortran compiler (f77 and f90) -# LD: Linker -# -# CPPDEFS: Preprocessor macros -# CPPFLAGS: C preprocessing flags -# CXXFLAGS: C++ preprocessing flags -# FPPFLAGS: Fortran preprocessing flags -# -# CFLAGS: C compiler flags -# FFLAGS: Fortran compiler flags (f77 and f90) -# LDFLAGS: Linker flags -# -# OTHERFLAGS: Additional flags for all languages (C, C++, Fortran) -# OTHER_CFLAGS: Optional C flags -# OTHER_CXXFLAGS: Optional C++ flags -# OTHER_FFLAGS: Optional Fortran flags +# mkmf conventions are close, but not identical, to autoconf. We attempt to +# map the autoconf variables to the mkmf variables. +# +# The following variables are used by Makefiles generated by mkmf. 
+# +# CC C compiler +# CXX C++ compiler +# FC Fortran compiler (f77 and f90) +# LD Linker +# AR Archiver +# +# CPPDEFS Preprocessor macros +# CPPFLAGS C preprocessing flags +# CXXFLAGS C++ preprocessing flags +# FPPFLAGS Fortran preprocessing flags +# +# CFLAGS C compiler flags +# FFLAGS Fortran compiler flags +# LDFLAGS Linker flags + libraries +# ARFLAGS Archiver flags +# +# OTHERFLAGS Additional flags for all languages (C, C++, Fortran) +# OTHER_CFLAGS Optional C flags +# OTHER_CXXFLAGS Optional C++ flags +# OTHER_FFLAGS Optional Fortran flags +# TMPFILES Placeholder for `make clean` deletion (as `make neat`). +# +# +# NOTES: +# - FPPFLAGS and FFLAGS always appear as a pair, and autoconf does not use +# FPPFLAGS, so FPPFLAGS does not serve much purpose. +# +# - mkmf's FFLAGS does not distinguish between autoconf's fixed-format +# FFLAGS and free-format FCFLAGS. +# +# - LDFLAGS does not distinguish between autoconf's LDFLAGS and LIBS. +# It also places both after the executable rather than just LIBS. CC = @CC@ FC = @FC@ LD = @FC@ +AR = @AR@ CPPDEFS = @DEFS@ CPPFLAGS = @CPPFLAGS@ FFLAGS = @FCFLAGS@ LDFLAGS = @LDFLAGS@ +ARFLAGS = @ARFLAGS@ # Gather modulefiles TMPFILES = $(wildcard *.mod) diff --git a/ac/deps/configure.fms.ac b/ac/deps/configure.fms.ac index 1d66194c81..bf899126cc 100644 --- a/ac/deps/configure.fms.ac +++ b/ac/deps/configure.fms.ac @@ -1,4 +1,6 @@ # Autoconf configuration +AC_PREREQ([2.63]) + AC_INIT( [FMS], [ ], @@ -13,13 +15,14 @@ AC_PROG_CC AX_MPI CC=$MPICC + # FMS configuration -# Linux and OSX have a gettid system call, but it is not implemented in older +# Linux and macOS have a gettid system call, but it is not implemented in older # glibc implementations. When unavailable, a native syscall is used. # # On Linux, this is defined in unistd.h as __NR_gettid, and FMS is hard-coded -# to use this value. In OS X, this is defined in sys/syscall.h as SYS_gettid, +# to use this value. In macOS, this is defined in sys/syscall.h as SYS_gettid, # so we override this macro if __NR_gettid is unavailable. AC_CHECK_FUNCS([gettid], [], [ AC_MSG_CHECKING([if __NR_gettid must be redefined]) @@ -71,18 +74,39 @@ AC_DEFINE([use_libMPI]) # netCDF configuration -AC_PATH_PROG([NC_CONFIG], [nc-config]) -AS_IF([test -n "$NC_CONFIG"], - [CPPFLAGS="$CPPFLAGS -I$($NC_CONFIG --includedir)" - FCFLAGS="$FCFLAGS -I$($NC_CONFIG --includedir)" - LDFLAGS="$LDFLAGS -L$($NC_CONFIG --libdir)"], - [AC_MSG_ERROR([Could not find nc-config.])]) - -AX_FC_CHECK_MODULE([netcdf], - [], [AC_MSG_ERROR([Could not find FMS library.])]) -AX_FC_CHECK_LIB([netcdff], [nf_create], [netcdf], - [], [AC_MSG_ERROR([Could not link netcdff library.])] -) + +# Check for netcdf.h header function declarations. +# If unavailable, then try to invoke nc-create. +AC_LANG_PUSH([C]) +AC_CHECK_HEADERS([netcdf.h], [], [ + AS_UNSET([ac_cv_header_netcdf_h]) + AC_PATH_PROG([NC_CONFIG], [nc-config]) + AS_IF([test -n "$NC_CONFIG"], [ + AC_SUBST([CPPFLAGS], ["$CPPFLAGS -I$($NC_CONFIG --includedir)"]) + ], + [AC_MSG_ERROR([Could not find nc-config.])] + ) + AC_CHECK_HEADERS([netcdf.h], [], [ + AC_MSG_ERROR([Could not find netcdf.h]) + ]) +]) +AC_LANG_POP([C]) + +# Search for the Fortran netCDF module, fallback to nf-config. 
+AX_FC_CHECK_MODULE([netcdf], [], [ + AS_UNSET([ax_fc_cv_mod_netcdf]) + AC_PATH_PROG([NF_CONFIG], [nf-config]) + AS_IF([test -n "$NF_CONFIG"], [ + AC_SUBST([FCFLAGS], ["$FCFLAGS -I$($NF_CONFIG --includedir)"]) + ], + [AC_MSG_ERROR([Could not find nf-config.])] + ) + AX_FC_CHECK_MODULE([netcdf], [], [ + AC_MSG_ERROR([Could not find netcdf module.]) + ]) +]) + +# FMS requires this macro to signal netCDF support. AC_DEFINE([use_netCDF]) @@ -98,15 +122,31 @@ AS_IF( # OpenMP configuration -AC_OPENMP + +# NOTE: AC_OPENMP fails in Autoconf <2.69 when LANG is Fortran or Fortran 77. +# For older versions, we test against CC and use the result for FC. +m4_version_prereq([2.69], [AC_OPENMP], [ + AC_LANG_PUSH([C]) + AC_OPENMP + AC_LANG_POP([C]) + OPENMP_FCFLAGS="$OPENMP_CFLAGS" +]) + +# NOTE: Only apply OpenMP flags if explicitly enabled. AS_IF( - [test "$enable_openmp" = yes], - [FCFLAGS="$FCFLAGS $OPENMP_FCFLAGS" - LDFLAGS="$LDFLAGS $OPENMP_FCFLAGS"]) + [test "$enable_openmp" = yes], [ + FCFLAGS="$FCFLAGS $OPENMP_FCFLAGS" + LDFLAGS="$LDFLAGS $OPENMP_FCFLAGS" +]) + +# Unlimited line length (2.67) +# AC_FC_LINE_LENGTH was added in 2.67. +m4_version_prereq([2.67], + [AC_FC_LINE_LENGTH([unlimited])], + [AX_FC_LINE_LENGTH([unlimited])] +) -# Unlimited line length -AC_FC_LINE_LENGTH([unlimited]) # Allow invaliz BOZ assignment AX_FC_ALLOW_INVALID_BOZ @@ -141,12 +181,22 @@ AS_IF([test -z "$MKMF"], [ # MKMF commands AC_CONFIG_COMMANDS([path_names], [${LIST_PATHS} -l ${srcdir}], - [LIST_PATHS=${LIST_PATHS}]) + [LIST_PATHS=${LIST_PATHS}] +) AC_CONFIG_COMMANDS([mkmf], [${MKMF} -p libFMS.a -m Makefile.mkmf path_names], - [MKMF=${MKMF}]) + [MKMF=${MKMF}] +) + + +# Autoconf does not configure the archiver (ar), as it is handled by Automake. +# TODO: Properly configure this tool. For now, we hard-set this to `ar`. 
+AR=ar +ARFLAGS=rv +AC_SUBST(AR) +AC_SUBST(ARFLAGS) # Prepare output diff --git a/ac/deps/m4/ax_fc_cray_pointer.m4 b/ac/deps/m4/ax_fc_cray_pointer.m4 index a9f5d9bbe3..57ed186afa 100644 --- a/ac/deps/m4/ax_fc_cray_pointer.m4 +++ b/ac/deps/m4/ax_fc_cray_pointer.m4 @@ -19,8 +19,8 @@ dnl AC_DEFUN([AX_FC_CRAY_POINTER], [ AC_LANG_ASSERT([Fortran]) AC_MSG_CHECKING([for $FC option to support Cray pointers]) - AC_CACHE_VAL([ac_cv_prog_fc_cray_ptr], [ - ac_cv_prog_fc_cray_ptr='unknown' + AC_CACHE_VAL([ac_cv_fc_cray_ptr], [ + ac_cv_fc_cray_ptr='unknown' ac_save_FCFLAGS=$FCFLAGS for ac_option in none -fcray-pointer -Mcray=pointer; do test "$ac_option" != none && FCFLAGS="$ac_save_FCFLAGS $ac_option" @@ -29,21 +29,21 @@ AC_DEFUN([AX_FC_CRAY_POINTER], [ integer aptr(2) pointer (iptr, aptr) ])], - [ac_cv_prog_fc_cray_ptr=$ac_option], + [ac_cv_fc_cray_ptr=$ac_option], ) FCFLAGS=$ac_save_FCFLAGS - AS_IF([test "$ac_cv_prog_fc_cray_ptr" != unknown], [break]) + AS_IF([test "$ac_cv_fc_cray_ptr" != unknown], [break]) done ]) - AS_CASE([ac_cv_prog_fc_cray_ptr], + AS_CASE([ac_cv_fc_cray_ptr], [none], [AC_MSG_RESULT([none_needed])], [unknown], [AC_MSG_RESULT([unsupported])], - [AC_MSG_RESULT([$ac_cv_prog_fc_cray_ptr])] + [AC_MSG_RESULT([$ac_cv_fc_cray_ptr])] ) - AS_IF([test "$ac_cv_prog_fc_cray_ptr" != unknown], [ + AS_IF([test "$ac_cv_fc_cray_ptr" != unknown], [ m4_default([$1], [ - AS_IF([test "$ac_cv_prog_fc_cray_ptr" != none], - [FCFLAGS="$FCFLAGS $ac_cv_prog_fc_cray_ptr"] + AS_IF([test "$ac_cv_fc_cray_ptr" != none], + [FCFLAGS="$FCFLAGS $ac_cv_fc_cray_ptr"] ) ])], [m4_default([$2], [AC_MSG_ERROR(["$FC does not support Cray pointers"])])] diff --git a/ac/deps/m4/ax_fc_line_length.m4 b/ac/deps/m4/ax_fc_line_length.m4 new file mode 100644 index 0000000000..97271da1f6 --- /dev/null +++ b/ac/deps/m4/ax_fc_line_length.m4 @@ -0,0 +1,101 @@ +# AX_FC_LINE_LENGTH([LENGTH], [ACTION-IF-SUCCESS], +# [ACTION-IF-FAILURE = FAILURE]) +# ------------------------------------------------ +# This is a backport of the AC_FC_LINE_LENGTH macro in Autoconf 2.67 and newer. +# Comments below are from the Autoconf 2.69 implementation. +# +# Look for a compiler flag to make the Fortran (FC) compiler accept long lines +# in the current (free- or fixed-format) source code, and adds it to FCFLAGS. +# The optional LENGTH may be 80, 132 (default), or `unlimited' for longer +# lines. Note that line lengths above 250 columns are not portable, and some +# compilers (hello ifort) do not accept more than 132 columns at least for +# fixed format. Call ACTION-IF-SUCCESS (defaults to nothing) if successful +# (i.e. can compile code using new extension) and ACTION-IF-FAILURE (defaults +# to failing with an error message) if not. (Defined via DEFUN_ONCE to +# prevent flag from being added to FCFLAGS multiple times.) +# You should call AC_FC_FREEFORM or AC_FC_FIXEDFORM to set the desired format +# prior to using this macro. +# +# The known flags are: +# -f{free,fixed}-line-length-N with N 72, 80, 132, or 0 or none for none. +# -ffree-line-length-none: GNU gfortran +# -ffree-line-length-huge: g95 (also -ffixed-line-length-N as above) +# -qfixed=132 80 72: IBM compiler (xlf) +# -Mextend: Cray +# -132 -80 -72: Intel compiler (ifort) +# Needs to come before -extend_source because ifort +# accepts that as well with an optional parameter and +# doesn't fail but only warns about unknown arguments. 
+# -extend_source: SGI compiler +# -W, -WNN (132, 80, 72): Absoft Fortran +# +es, +extend_source: HP Fortran (254 in either form, default is 72 fixed, +# 132 free) +# -w, (-)-wide: Lahey/Fujitsu Fortran (255 cols in fixed form) +# -e: Sun Fortran compiler (132 characters) +# -132: NAGWare +# -72, -f, -Wf,-f: f2c (a weak form of "free-form" and long lines). +# /XLine: Open Watcom + +AC_DEFUN_ONCE([AX_FC_LINE_LENGTH], [ + AC_LANG_ASSERT([Fortran]) + m4_case(m4_default([$1], [132]), + [unlimited], [ + ac_fc_line_len_string=unlimited + ac_fc_line_len=0 + ac_fc_line_length_test=' + subroutine longer_than_132(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8,'\ +'arg9,arg10,arg11,arg12,arg13,arg14,arg15,arg16,arg17,arg18,arg19)' + ], + [132], [ + ac_fc_line_len=132 + ac_fc_line_length_test=' + subroutine longer_than_80(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8,arg9,'\ +'arg10)' + ], + [80], [ + ac_fc_line_len=80 + ac_fc_line_length_test=' + subroutine longer_than_72(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8,arg9)' + ], + [m4_warning([Invalid length argument `$1'])] + ) + : ${ac_fc_line_len_string=$ac_fc_line_len} + AC_MSG_CHECKING([for Fortran flag needed to accept $ac_fc_line_len_string column source lines]) + AC_CACHE_VAL([ac_cv_fc_line_length], [ + ac_cv_fc_line_length=unknown + ac_save_FCFLAGS=$FCFLAGS + for ac_flag in none \ + -ffree-line-length-none \ + -ffixed-line-length-none \ + -ffree-line-length-huge \ + -ffree-line-length-$ac_fc_line_len \ + -ffixed-line-length-$ac_fc_line_len \ + -qfixed=$ac_fc_line_len \ + -Mextend \ + -$ac_fc_line_len \ + -extend_source \ + -W$ac_fc_line_len \ + -W +extend_source +es -wide --wide -w -e -f -Wf,-f -xline + do + test "$ac_flag" != none && FCFLAGS="$ac_save_FCFLAGS $ac_flag" + AC_COMPILE_IFELSE([$ac_fc_line_length_test + end subroutine + ], [ac_cv_fc_line_length=$ac_flag] + ) + FCFLAGS=$ac_save_FCFLAGS + dnl TODO: Remove conftest.{err,$ac_objext,$ac_ext} ?? + AS_IF([test "$ac_cv_fc_line_length" != unknown], [break]) + done + ]) + AC_MSG_RESULT([$ac_cv_fc_line_length]) + AS_IF([test "$ac_cv_fc_line_length" != unknown], [ + m4_default([$2], [ + AS_IF([test "$ac_cv_fc_line_length" != none], [ + FCFLAGS="$FCFLAGS $ac_cv_fc_line_length" + ]) + ])], [ + m4_default([$3], [ + AC_MSG_ERROR([Fortran does not accept long source lines], 77) + ]) + ]) +]) diff --git a/ac/deps/m4/ax_mpi.m4 b/ac/deps/m4/ax_mpi.m4 index ecce2e141a..3d9966a19d 100644 --- a/ac/deps/m4/ax_mpi.m4 +++ b/ac/deps/m4/ax_mpi.m4 @@ -67,7 +67,7 @@ AU_ALIAS([ACX_MPI], [AX_MPI]) AC_DEFUN([AX_MPI], [ -AC_PREREQ(2.50) dnl for AC_LANG_CASE +AC_PREREQ([2.50]) dnl for AC_LANG_CASE AC_LANG_CASE([C], [ AC_REQUIRE([AC_PROG_CC]) @@ -135,16 +135,16 @@ if test x = x"$MPILIBS"; then AC_CHECK_LIB(mpich, MPI_Init, [MPILIBS="-lmpich"]) fi -dnl We have to use AC_TRY_COMPILE and not AC_CHECK_HEADER because the +dnl We have to use AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[]])],[],[]) and not AC_CHECK_HEADER because the dnl latter uses $CPP, not $CC (which may be mpicc). 
AC_LANG_CASE([C], [if test x != x"$MPILIBS"; then AC_MSG_CHECKING([for mpi.h]) - AC_TRY_COMPILE([#include <mpi.h>],[],[AC_MSG_RESULT(yes)], [MPILIBS="" + AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <mpi.h>]], [[]])],[AC_MSG_RESULT(yes)],[MPILIBS="" AC_MSG_RESULT(no)]) fi], [C++], [if test x != x"$MPILIBS"; then AC_MSG_CHECKING([for mpi.h]) - AC_TRY_COMPILE([#include <mpi.h>],[],[AC_MSG_RESULT(yes)], [MPILIBS="" + AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <mpi.h>]], [[]])],[AC_MSG_RESULT(yes)],[MPILIBS="" AC_MSG_RESULT(no)]) fi], [Fortran 77], [if test x != x"$MPILIBS"; then diff --git a/ac/m4/ax_fc_check_lib.m4 b/ac/m4/ax_fc_check_lib.m4 index c0accab6cd..a7f848cd60 100644 --- a/ac/m4/ax_fc_check_lib.m4 +++ b/ac/m4/ax_fc_check_lib.m4 @@ -18,7 +18,7 @@ dnl library with different -L flags, or perhaps other ld configurations. dnl dnl Results are cached in the ax_fc_cv_lib_LIBRARY_FUNCTION variable. dnl -AC_DEFUN([AX_FC_CHECK_LIB],[dnl +AC_DEFUN([AX_FC_CHECK_LIB],[ AS_VAR_PUSHDEF([ax_fc_Lib], [ax_fc_cv_lib_$1_$2]) m4_ifval([$6], [ax_fc_lib_msg_LDFLAGS=" with $6"], @@ -29,14 +29,15 @@ AC_DEFUN([AX_FC_CHECK_LIB],[dnl LDFLAGS="$6 $LDFLAGS" ax_fc_check_lib_save_LIBS=$LIBS LIBS="-l$1 $7 $LIBS" - AS_IF([test -n $3], + AS_IF([test -n "$3"], [ax_fc_use_mod="use $3"], [ax_fc_use_mod=""]) - AC_LINK_IFELSE([ - AC_LANG_PROGRAM([], [dnl + AC_LINK_IFELSE([dnl +dnl Begin 7-column code block +AC_LANG_PROGRAM([], [dnl $ax_fc_use_mod - call $2]dnl - ) + call $2])dnl +dnl End code block ], [AS_VAR_SET([ax_fc_Lib], [yes])], [AS_VAR_SET([ax_fc_Lib], [no])] diff --git a/ac/m4/ax_mpi.m4 b/ac/m4/ax_mpi.m4 index ecce2e141a..3d9966a19d 100644 --- a/ac/m4/ax_mpi.m4 +++ b/ac/m4/ax_mpi.m4 @@ -67,7 +67,7 @@ AU_ALIAS([ACX_MPI], [AX_MPI]) AC_DEFUN([AX_MPI], [ -AC_PREREQ(2.50) dnl for AC_LANG_CASE +AC_PREREQ([2.50]) dnl for AC_LANG_CASE AC_LANG_CASE([C], [ AC_REQUIRE([AC_PROG_CC]) @@ -135,16 +135,16 @@ if test x = x"$MPILIBS"; then AC_CHECK_LIB(mpich, MPI_Init, [MPILIBS="-lmpich"]) fi -dnl We have to use AC_TRY_COMPILE and not AC_CHECK_HEADER because the +dnl We have to use AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[]])],[],[]) and not AC_CHECK_HEADER because the dnl latter uses $CPP, not $CC (which may be mpicc).
AC_LANG_CASE([C], [if test x != x"$MPILIBS"; then AC_MSG_CHECKING([for mpi.h]) - AC_TRY_COMPILE([#include <mpi.h>],[],[AC_MSG_RESULT(yes)], [MPILIBS="" + AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <mpi.h>]], [[]])],[AC_MSG_RESULT(yes)],[MPILIBS="" AC_MSG_RESULT(no)]) fi], [C++], [if test x != x"$MPILIBS"; then AC_MSG_CHECKING([for mpi.h]) - AC_TRY_COMPILE([#include <mpi.h>],[],[AC_MSG_RESULT(yes)], [MPILIBS="" + AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <mpi.h>]], [[]])],[AC_MSG_RESULT(yes)],[MPILIBS="" AC_MSG_RESULT(no)]) fi], [Fortran 77], [if test x != x"$MPILIBS"; then diff --git a/config_src/coupled_driver/MOM_surface_forcing_gfdl.F90 b/config_src/drivers/FMS_cap/MOM_surface_forcing_gfdl.F90 similarity index 96% rename from config_src/coupled_driver/MOM_surface_forcing_gfdl.F90 rename to config_src/drivers/FMS_cap/MOM_surface_forcing_gfdl.F90 index 7075fb7c10..bb89c4e85e 100644 --- a/config_src/coupled_driver/MOM_surface_forcing_gfdl.F90 +++ b/config_src/drivers/FMS_cap/MOM_surface_forcing_gfdl.F90 @@ -5,10 +5,14 @@ module MOM_surface_forcing_gfdl !#CTRL# use MOM_controlled_forcing, only : apply_ctrl_forcing, register_ctrl_forcing_restarts !#CTRL# use MOM_controlled_forcing, only : controlled_forcing_init, controlled_forcing_end !#CTRL# use MOM_controlled_forcing, only : ctrl_forcing_CS -use MOM_coms, only : reproducing_sum +use MOM_coms, only : reproducing_sum, field_chksum use MOM_constants, only : hlv, hlf +use MOM_coupler_types, only : coupler_2d_bc_type, coupler_type_write_chksums +use MOM_coupler_types, only : coupler_type_initialized, coupler_type_spawn +use MOM_coupler_types, only : coupler_type_copy_data use MOM_cpu_clock, only : cpu_clock_id, cpu_clock_begin, cpu_clock_end use MOM_cpu_clock, only : CLOCK_SUBCOMPONENT +use MOM_data_override, only : data_override_init, data_override use MOM_diag_mediator, only : diag_ctrl, safe_alloc_ptr, time_type use MOM_domains, only : pass_vector, pass_var, fill_symmetric_edges use MOM_domains, only : AGRID, BGRID_NE, CGRID_NE, To_All @@ -21,7 +25,9 @@ module MOM_surface_forcing_gfdl use MOM_forcing_type, only : allocate_mech_forcing, deallocate_mech_forcing use MOM_get_input, only : Get_MOM_Input, directories use MOM_grid, only : ocean_grid_type -use MOM_io, only : slasher, write_version_number, MOM_read_data +use MOM_interpolate, only : init_external_field, time_interp_external +use MOM_interpolate, only : time_interp_external_init +use MOM_io, only : slasher, write_version_number, MOM_read_data, stdout use MOM_restart, only : register_restart_field, restart_init, MOM_restart_CS use MOM_restart, only : restart_init_end, save_restart, restore_state use MOM_string_functions, only : uppercase @@ -31,15 +37,6 @@ module MOM_surface_forcing_gfdl use user_revise_forcing, only : user_alter_forcing, user_revise_forcing_init use user_revise_forcing, only : user_revise_forcing_CS -use coupler_types_mod, only : coupler_2d_bc_type, coupler_type_write_chksums -use coupler_types_mod, only : coupler_type_initialized, coupler_type_spawn -use coupler_types_mod, only : coupler_type_copy_data -use data_override_mod, only : data_override_init, data_override -use fms_mod, only : stdout -use mpp_mod, only : mpp_chksum -use time_interp_external_mod, only : init_external_field, time_interp_external -use time_interp_external_mod, only : time_interp_external_init - implicit none ; private #include @@ -319,8 +316,7 @@ subroutine convert_IOB_to_fluxes(IOB, fluxes, index_bounds, Time, valid_time, G, if ((.not.coupler_type_initialized(fluxes%tr_fluxes)) .and.
& coupler_type_initialized(IOB%fluxes)) & - call coupler_type_spawn(IOB%fluxes, fluxes%tr_fluxes, & - (/is,is,ie,ie/), (/js,js,je,je/)) + call coupler_type_spawn(IOB%fluxes, fluxes%tr_fluxes, (/is,is,ie,ie/), (/js,js,je,je/)) ! It might prove valuable to use the same array extents as the rest of the ! ocean model, rather than using haloless arrays, in which case the last line ! would be: ( (/isd,is,ie,ied/), (/jsd,js,je,jed/)) @@ -350,7 +346,7 @@ subroutine convert_IOB_to_fluxes(IOB, fluxes, index_bounds, Time, valid_time, G, ! Salinity restoring logic if (CS%restore_salt) then - call time_interp_external(CS%id_srestore,Time,data_restore) + call time_interp_external(CS%id_srestore, Time, data_restore) ! open_ocn_mask indicates where to restore salinity (1 means restore, 0 does not) open_ocn_mask(:,:) = 1.0 if (CS%mask_srestore_under_ice) then ! Do not restore under sea-ice @@ -407,7 +403,7 @@ subroutine convert_IOB_to_fluxes(IOB, fluxes, index_bounds, Time, valid_time, G, ! SST restoring logic if (CS%restore_temp) then - call time_interp_external(CS%id_trestore,Time,data_restore) + call time_interp_external(CS%id_trestore, Time, data_restore) do j=js,je ; do i=is,ie delta_sst = data_restore(i,j)- sfc_state%SST(i,j) delta_sst = sign(1.0,delta_sst)*min(abs(delta_sst),CS%max_delta_trestore) @@ -1121,28 +1117,27 @@ subroutine apply_flux_adjustments(G, US, CS, Time, fluxes) isc = G%isc; iec = G%iec ; jsc = G%jsc; jec = G%jec - overrode_h = .false. - call data_override('OCN', 'hflx_adj', temp_at_h(isc:iec,jsc:jec), Time, override=overrode_h) + call data_override(G%Domain, 'hflx_adj', temp_at_h, Time, override=overrode_h, & + scale=US%W_m2_to_QRZ_T) if (overrode_h) then ; do j=jsc,jec ; do i=isc,iec - fluxes%heat_added(i,j) = fluxes%heat_added(i,j) + US%W_m2_to_QRZ_T*temp_at_h(i,j)* G%mask2dT(i,j) + fluxes%heat_added(i,j) = fluxes%heat_added(i,j) + temp_at_h(i,j)* G%mask2dT(i,j) enddo ; enddo ; endif ! Not needed? ! if (overrode_h) call pass_var(fluxes%heat_added, G%Domain) - overrode_h = .false. - call data_override('OCN', 'sflx_adj', temp_at_h(isc:iec,jsc:jec), Time, override=overrode_h) + call data_override(G%Domain, 'sflx_adj', temp_at_h, Time, override=overrode_h, & + scale=US%kg_m2s_to_RZ_T) if (overrode_h) then ; do j=jsc,jec ; do i=isc,iec - fluxes%salt_flux_added(i,j) = fluxes%salt_flux_added(i,j) + & - US%kg_m2s_to_RZ_T * temp_at_h(i,j)* G%mask2dT(i,j) + fluxes%salt_flux_added(i,j) = fluxes%salt_flux_added(i,j) + temp_at_h(i,j) * G%mask2dT(i,j) enddo ; enddo ; endif ! Not needed? ! if (overrode_h) call pass_var(fluxes%salt_flux_added, G%Domain) - overrode_h = .false. - call data_override('OCN', 'prcme_adj', temp_at_h(isc:iec,jsc:jec), Time, override=overrode_h) + call data_override(G%Domain, 'prcme_adj', temp_at_h, Time, override=overrode_h, & + scale=US%kg_m2s_to_RZ_T) if (overrode_h) then ; do j=jsc,jec ; do i=isc,iec - fluxes%vprec(i,j) = fluxes%vprec(i,j) + US%kg_m2s_to_RZ_T * temp_at_h(i,j)* G%mask2dT(i,j) + fluxes%vprec(i,j) = fluxes%vprec(i,j) + temp_at_h(i,j)* G%mask2dT(i,j) enddo ; enddo ; endif ! Not needed? ! if (overrode_h) call pass_var(fluxes%vprec, G%Domain) end subroutine apply_flux_adjustments @@ -1174,8 +1169,8 @@ subroutine apply_force_adjustments(G, US, CS, Time, forces) tempx_at_h(:,:) = 0.0 ; tempy_at_h(:,:) = 0.0 ! Either reads data or leaves contents unchanged overrode_x = .false. ; overrode_y = .false. 
- call data_override('OCN', 'taux_adj', tempx_at_h(isc:iec,jsc:jec), Time, override=overrode_x) - call data_override('OCN', 'tauy_adj', tempy_at_h(isc:iec,jsc:jec), Time, override=overrode_y) + call data_override(G%Domain, 'taux_adj', tempx_at_h, Time, override=overrode_x, scale=Pa_conversion) + call data_override(G%Domain, 'tauy_adj', tempy_at_h, Time, override=overrode_y, scale=Pa_conversion) if (overrode_x .or. overrode_y) then if (.not. (overrode_x .and. overrode_y)) call MOM_error(FATAL,"apply_flux_adjustments: "//& @@ -1190,8 +1185,8 @@ subroutine apply_force_adjustments(G, US, CS, Time, forces) if (rDlon > 0.) rDlon = 1. / rDlon cosA = dLonDx * rDlon sinA = dLonDy * rDlon - zonal_tau = Pa_conversion * tempx_at_h(i,j) - merid_tau = Pa_conversion * tempy_at_h(i,j) + zonal_tau = tempx_at_h(i,j) + merid_tau = tempy_at_h(i,j) tempx_at_h(i,j) = cosA * zonal_tau - sinA * merid_tau tempy_at_h(i,j) = sinA * zonal_tau + cosA * merid_tau enddo ; enddo @@ -1486,7 +1481,7 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, wind_stagger) enddo ; enddo endif - call time_interp_external_init + call time_interp_external_init() ! Optionally read a x-y gustiness field in place of a global constant. call get_param(param_file, mdl, "READ_GUST_2D", CS%read_gust_2d, & @@ -1554,11 +1549,11 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, wind_stagger) "above land points (i.e. G%mask2dT = 0).", default=.false., & debuggingParam=.true.) - call data_override_init(Ocean_domain_in=G%Domain%mpp_domain) + call data_override_init(G%Domain) if (CS%restore_salt) then salt_file = trim(CS%inputdir) // trim(CS%salt_restore_file) - CS%id_srestore = init_external_field(salt_file, CS%salt_restore_var_name, domain=G%Domain%mpp_domain) + CS%id_srestore = init_external_field(salt_file, CS%salt_restore_var_name, MOM_domain=G%Domain) call safe_alloc_ptr(CS%srestore_mask,isd,ied,jsd,jed); CS%srestore_mask(:,:) = 1.0 if (CS%mask_srestore) then ! read a 2-d file containing a mask for restoring fluxes flnam = trim(CS%inputdir) // 'salt_restore_mask.nc' @@ -1568,7 +1563,7 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, wind_stagger) if (CS%restore_temp) then temp_file = trim(CS%inputdir) // trim(CS%temp_restore_file) - CS%id_trestore = init_external_field(temp_file, CS%temp_restore_var_name, domain=G%Domain%mpp_domain) + CS%id_trestore = init_external_field(temp_file, CS%temp_restore_var_name, MOM_domain=G%Domain) call safe_alloc_ptr(CS%trestore_mask,isd,ied,jsd,jed); CS%trestore_mask(:,:) = 1.0 if (CS%mask_trestore) then ! read a 2-d file containing a mask for restoring fluxes flnam = trim(CS%inputdir) // 'temp_restore_mask.nc' @@ -1629,30 +1624,30 @@ subroutine ice_ocn_bnd_type_chksum(id, timestep, iobt) !! 
ocean in a coupled model whose checksums are reported integer :: n,m, outunit - outunit = stdout() + outunit = stdout write(outunit,*) "BEGIN CHECKSUM(ice_ocean_boundary_type):: ", id, timestep - write(outunit,100) 'iobt%u_flux ', mpp_chksum( iobt%u_flux ) - write(outunit,100) 'iobt%v_flux ', mpp_chksum( iobt%v_flux ) - write(outunit,100) 'iobt%t_flux ', mpp_chksum( iobt%t_flux ) - write(outunit,100) 'iobt%q_flux ', mpp_chksum( iobt%q_flux ) - write(outunit,100) 'iobt%salt_flux ', mpp_chksum( iobt%salt_flux ) - write(outunit,100) 'iobt%lw_flux ', mpp_chksum( iobt%lw_flux ) - write(outunit,100) 'iobt%sw_flux_vis_dir', mpp_chksum( iobt%sw_flux_vis_dir) - write(outunit,100) 'iobt%sw_flux_vis_dif', mpp_chksum( iobt%sw_flux_vis_dif) - write(outunit,100) 'iobt%sw_flux_nir_dir', mpp_chksum( iobt%sw_flux_nir_dir) - write(outunit,100) 'iobt%sw_flux_nir_dif', mpp_chksum( iobt%sw_flux_nir_dif) - write(outunit,100) 'iobt%lprec ', mpp_chksum( iobt%lprec ) - write(outunit,100) 'iobt%fprec ', mpp_chksum( iobt%fprec ) - write(outunit,100) 'iobt%runoff ', mpp_chksum( iobt%runoff ) - write(outunit,100) 'iobt%calving ', mpp_chksum( iobt%calving ) - write(outunit,100) 'iobt%p ', mpp_chksum( iobt%p ) + write(outunit,100) 'iobt%u_flux ', field_chksum( iobt%u_flux ) + write(outunit,100) 'iobt%v_flux ', field_chksum( iobt%v_flux ) + write(outunit,100) 'iobt%t_flux ', field_chksum( iobt%t_flux ) + write(outunit,100) 'iobt%q_flux ', field_chksum( iobt%q_flux ) + write(outunit,100) 'iobt%salt_flux ', field_chksum( iobt%salt_flux ) + write(outunit,100) 'iobt%lw_flux ', field_chksum( iobt%lw_flux ) + write(outunit,100) 'iobt%sw_flux_vis_dir', field_chksum( iobt%sw_flux_vis_dir) + write(outunit,100) 'iobt%sw_flux_vis_dif', field_chksum( iobt%sw_flux_vis_dif) + write(outunit,100) 'iobt%sw_flux_nir_dir', field_chksum( iobt%sw_flux_nir_dir) + write(outunit,100) 'iobt%sw_flux_nir_dif', field_chksum( iobt%sw_flux_nir_dif) + write(outunit,100) 'iobt%lprec ', field_chksum( iobt%lprec ) + write(outunit,100) 'iobt%fprec ', field_chksum( iobt%fprec ) + write(outunit,100) 'iobt%runoff ', field_chksum( iobt%runoff ) + write(outunit,100) 'iobt%calving ', field_chksum( iobt%calving ) + write(outunit,100) 'iobt%p ', field_chksum( iobt%p ) if (associated(iobt%ustar_berg)) & - write(outunit,100) 'iobt%ustar_berg ', mpp_chksum( iobt%ustar_berg ) + write(outunit,100) 'iobt%ustar_berg ', field_chksum( iobt%ustar_berg ) if (associated(iobt%area_berg)) & - write(outunit,100) 'iobt%area_berg ', mpp_chksum( iobt%area_berg ) + write(outunit,100) 'iobt%area_berg ', field_chksum( iobt%area_berg ) if (associated(iobt%mass_berg)) & - write(outunit,100) 'iobt%mass_berg ', mpp_chksum( iobt%mass_berg ) + write(outunit,100) 'iobt%mass_berg ', field_chksum( iobt%mass_berg ) 100 FORMAT(" CHECKSUM::",A20," = ",Z20) call coupler_type_write_chksums(iobt%fluxes, outunit, 'iobt%') @@ -1664,7 +1659,8 @@ subroutine check_mask_val_consistency(val, mask, i, j, varname, G) real, intent(in) :: val !< value of flux/variable passed by IOB real, intent(in) :: mask !< value of ocean mask - integer, intent(in) :: i, j !< model grid cell indices + integer, intent(in) :: i !< model grid cell indices + integer, intent(in) :: j !< model grid cell indices character(len=*), intent(in) :: varname !< variable name type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure ! 
Local variables diff --git a/config_src/coupled_driver/ocean_model_MOM.F90 b/config_src/drivers/FMS_cap/ocean_model_MOM.F90 similarity index 92% rename from config_src/coupled_driver/ocean_model_MOM.F90 rename to config_src/drivers/FMS_cap/ocean_model_MOM.F90 index 082099158c..f635e886a5 100644 --- a/config_src/coupled_driver/ocean_model_MOM.F90 +++ b/config_src/drivers/FMS_cap/ocean_model_MOM.F90 @@ -15,13 +15,19 @@ module ocean_model_mod use MOM, only : extract_surface_state, allocate_surface_state, finish_MOM_initialization use MOM, only : get_MOM_state_elements, MOM_state_is_synchronized use MOM, only : get_ocean_stocks, step_offline +use MOM_coms, only : field_chksum use MOM_constants, only : CELSIUS_KELVIN_OFFSET, hlf +use MOM_coupler_types, only : coupler_1d_bc_type, coupler_2d_bc_type +use MOM_coupler_types, only : coupler_type_spawn, coupler_type_write_chksums +use MOM_coupler_types, only : coupler_type_initialized, coupler_type_copy_data +use MOM_coupler_types, only : coupler_type_set_diags, coupler_type_send_data use MOM_diag_mediator, only : diag_ctrl, enable_averaging, disable_averaging use MOM_diag_mediator, only : diag_mediator_close_registration, diag_mediator_end -use MOM_domains, only : pass_var, pass_vector, AGRID, BGRID_NE, CGRID_NE -use MOM_domains, only : TO_ALL, Omit_Corners +use MOM_domains, only : MOM_domain_type, domain2d, clone_MOM_domain, get_domain_extent +use MOM_domains, only : pass_var, pass_vector, AGRID, BGRID_NE, CGRID_NE, TO_ALL, Omit_Corners use MOM_error_handler, only : MOM_error, MOM_mesg, FATAL, WARNING, is_root_pe use MOM_error_handler, only : callTree_enter, callTree_leave +use MOM_EOS, only : gsw_sp_from_sr, gsw_pt_from_ct use MOM_file_parser, only : get_param, log_version, close_param_file, param_file_type use MOM_forcing_type, only : forcing, mech_forcing, allocate_forcing_type use MOM_forcing_type, only : fluxes_accumulate, get_net_mass_forcing @@ -29,7 +35,7 @@ module ocean_model_mod use MOM_forcing_type, only : forcing_diagnostics, mech_forcing_diags use MOM_get_input, only : Get_MOM_Input, directories use MOM_grid, only : ocean_grid_type -use MOM_io, only : close_file, file_exists, read_data, write_version_number +use MOM_io, only : write_version_number, stdout use MOM_marine_ice, only : iceberg_forces, iceberg_fluxes, marine_ice_init, marine_ice_CS use MOM_restart, only : MOM_restart_CS, save_restart use MOM_string_functions, only : uppercase @@ -48,16 +54,6 @@ module ocean_model_mod use MOM_verticalGrid, only : verticalGrid_type use MOM_ice_shelf, only : initialize_ice_shelf, shelf_calc_flux, ice_shelf_CS use MOM_ice_shelf, only : add_shelf_forces, ice_shelf_end, ice_shelf_save_restart -use coupler_types_mod, only : coupler_1d_bc_type, coupler_2d_bc_type -use coupler_types_mod, only : coupler_type_spawn, coupler_type_write_chksums -use coupler_types_mod, only : coupler_type_initialized, coupler_type_copy_data -use coupler_types_mod, only : coupler_type_set_diags, coupler_type_send_data -use mpp_domains_mod, only : domain2d, mpp_get_layout, mpp_get_global_domain -use mpp_domains_mod, only : mpp_define_domains, mpp_get_compute_domain, mpp_get_data_domain -use atmos_ocean_fluxes_mod, only : aof_set_coupler_flux -use fms_mod, only : stdout -use mpp_mod, only : mpp_chksum -use MOM_EOS, only : gsw_sp_from_sr, gsw_pt_from_ct use MOM_wave_interface, only: wave_parameters_CS, MOM_wave_interface_init use MOM_wave_interface, only: MOM_wave_interface_init_lite, Update_Surface_Waves @@ -107,7 +103,7 @@ module ocean_model_mod !! 
points of the two velocity components. Valid entries !! include AGRID, BGRID_NE, CGRID_NE, BGRID_SW, and CGRID_SW, !! corresponding to the community-standard Arakawa notation. - !! (These are named integers taken from mpp_parameter_mod.) + !! (These are named integers taken from the MOM_domains module.) !! Following MOM5, stagger is BGRID_NE by default when the !! ocean is initialized, but here it is set to -999 so that !! a global max across ocean and non-ocean processors can be @@ -121,6 +117,8 @@ module ocean_model_mod !! i.e. dzt(1) + eta_t + patm/rho0/grav [m] frazil =>NULL(), & !< Accumulated heating [J m-2] from frazil !! formation in the ocean. + melt_potential => NULL(), & !< Instantaneous heat used to melt sea ice [J m-2]. + OBLD => NULL(), & !< Ocean boundary layer depth [m]. area => NULL() !< cell area of the ocean surface [m2]. type(coupler_2d_bc_type) :: fields !< A structure that may contain named !! arrays of tracer-related surface fields. @@ -179,13 +177,13 @@ module ocean_model_mod !! processes before time stepping the dynamics. type(directories) :: dirs !< A structure containing several relevant directory paths. - type(mech_forcing) :: forces !< A structure with the driving mechanical surface forces - type(forcing) :: fluxes !< A structure containing pointers to - !! the thermodynamic ocean forcing fields. - type(forcing) :: flux_tmp !< A secondary structure containing pointers to the + type(mech_forcing) :: forces !< A structure with the driving mechanical surface forces + type(forcing) :: fluxes !< A structure containing pointers to + !! the thermodynamic ocean forcing fields. + type(forcing) :: flux_tmp !< A secondary structure containing pointers to the !! ocean forcing fields for when multiple coupled !! timesteps are taken per thermodynamic step. - type(surface) :: sfc_state !< A structure containing pointers to + type(surface) :: sfc_state !< A structure containing pointers to !! the ocean surface state fields. type(ocean_grid_type), pointer :: & grid => NULL() !< A pointer to a grid structure containing metrics @@ -267,6 +265,10 @@ subroutine ocean_model_init(Ocean_sfc, OS, Time_init, Time_in, wind_stagger, gas endif allocate(OS) +! allocate(OS%fluxes) +! allocate(OS%forces) +! allocate(OS%flux_tmp) + OS%is_ocean_pe = Ocean_sfc%is_ocean_pe if (.not.OS%is_ocean_pe) return @@ -355,6 +357,7 @@ subroutine ocean_model_init(Ocean_sfc, OS, Time_init, Time_in, wind_stagger, gas use_melt_pot=.false. endif + !allocate(OS%sfc_state) call allocate_surface_state(OS%sfc_state, OS%grid, use_temperature, do_integrals=.true., & gas_fields_ocn=gas_fields_ocn, use_meltpot=use_melt_pot) @@ -384,14 +387,8 @@ subroutine ocean_model_init(Ocean_sfc, OS, Time_init, Time_in, wind_stagger, gas call MOM_wave_interface_init_lite(param_file) endif - if (associated(OS%grid%Domain%maskmap)) then - call initialize_ocean_public_type(OS%grid%Domain%mpp_domain, Ocean_sfc, & - OS%diag, maskmap=OS%grid%Domain%maskmap, & - gas_fields_ocn=gas_fields_ocn) - else - call initialize_ocean_public_type(OS%grid%Domain%mpp_domain, Ocean_sfc, & - OS%diag, gas_fields_ocn=gas_fields_ocn) - endif + call initialize_ocean_public_type(OS%grid%Domain, Ocean_sfc, OS%diag, & + gas_fields_ocn=gas_fields_ocn) ! This call can only occur here if the coupler_bc_type variables have been ! initialized already using the information from gas_fields_ocn. @@ -506,8 +503,7 @@ subroutine update_ocean_model(Ice_ocean_boundary, OS, Ocean_sfc, time_start_upda (/is,is,ie,ie/), (/js,js,je,je/), as_needed=.true.) ! 
Translate Ice_ocean_boundary into fluxes and forces. - call mpp_get_compute_domain(Ocean_sfc%Domain, index_bnds(1), index_bnds(2), & - index_bnds(3), index_bnds(4)) + call get_domain_extent(Ocean_sfc%Domain, index_bnds(1), index_bnds(2), index_bnds(3), index_bnds(4)) if (do_dyn) then call convert_IOB_to_forces(Ice_ocean_boundary, OS%forces, index_bnds, OS%Time_dyn, OS%grid, OS%US, & @@ -726,7 +722,7 @@ end subroutine ocean_model_end subroutine ocean_model_save_restart(OS, Time, directory, filename_suffix) type(ocean_state_type), pointer :: OS !< A pointer to the structure containing the !! internal ocean state (in). - type(time_type), intent(in) :: Time !< The model time at this call, needed for mpp_write calls. + type(time_type), intent(in) :: Time !< The model time at this call, needed for writing files. character(len=*), optional, intent(in) :: directory !< An optional directory into which to !! write these restart files. character(len=*), optional, intent(in) :: filename_suffix !< An optional suffix (e.g., a time-stamp) @@ -758,16 +754,12 @@ subroutine ocean_model_save_restart(OS, Time, directory, filename_suffix) end subroutine ocean_model_save_restart !> Initialize the public ocean type -subroutine initialize_ocean_public_type(input_domain, Ocean_sfc, diag, maskmap, & - gas_fields_ocn) - type(domain2D), intent(in) :: input_domain !< The ocean model domain description +subroutine initialize_ocean_public_type(input_domain, Ocean_sfc, diag, gas_fields_ocn) + type(MOM_domain_type), intent(in) :: input_domain !< The ocean model domain description type(ocean_public_type), intent(inout) :: Ocean_sfc !< A structure containing various publicly - !! visible ocean surface properties after initialization, whose - !! elements are allocated here. - type(diag_ctrl), intent(in) :: diag !< A structure that regulates diagnsotic output - logical, dimension(:,:), & - optional, intent(in) :: maskmap !< A mask indicating which virtual processors - !! are actually in use. If missing, all are used. + !! visible ocean surface properties after + !! initialization, whose elements are allocated here. + type(diag_ctrl), intent(in) :: diag !< A structure that regulates diagnostic output type(coupler_1d_bc_type), & optional, intent(in) :: gas_fields_ocn !< If present, this type describes the !! ocean and surface-ice fields that will participate @@ -779,14 +771,9 @@ subroutine initialize_ocean_public_type(input_domain, Ocean_sfc, diag, maskmap, ! and have no halos. integer :: isc, iec, jsc, jec - call mpp_get_layout(input_domain,layout) - call mpp_get_global_domain(input_domain, xsize=xsz, ysize=ysz) - if (PRESENT(maskmap)) then - call mpp_define_domains((/1,xsz,1,ysz/),layout,Ocean_sfc%Domain, maskmap=maskmap) - else - call mpp_define_domains((/1,xsz,1,ysz/),layout,Ocean_sfc%Domain) - endif - call mpp_get_compute_domain(Ocean_sfc%Domain, isc, iec, jsc, jec) + call clone_MOM_domain(input_domain, Ocean_sfc%Domain, halo_size=0, symmetric=.false.) + + call get_domain_extent(Ocean_sfc%Domain, isc, iec, jsc, jec) allocate ( Ocean_sfc%t_surf (isc:iec,jsc:jec), & Ocean_sfc%s_surf (isc:iec,jsc:jec), & @@ -794,6 +781,8 @@ subroutine initialize_ocean_public_type(input_domain, Ocean_sfc, diag, maskmap, Ocean_sfc%v_surf (isc:iec,jsc:jec), & Ocean_sfc%sea_lev(isc:iec,jsc:jec), & Ocean_sfc%area (isc:iec,jsc:jec), & + Ocean_sfc%melt_potential(isc:iec,jsc:jec), & + Ocean_sfc%OBLD (isc:iec,jsc:jec), & Ocean_sfc%frazil (isc:iec,jsc:jec)) Ocean_sfc%t_surf(:,:) = 0.0 ! 
time averaged sst (Kelvin) passed to atmosphere/ice model @@ -802,6 +791,8 @@ subroutine initialize_ocean_public_type(input_domain, Ocean_sfc, diag, maskmap, Ocean_sfc%v_surf(:,:) = 0.0 ! time averaged v-current (m/sec) passed to atmosphere/ice models Ocean_sfc%sea_lev(:,:) = 0.0 ! time averaged thickness of top model grid cell (m) plus patm/rho0/grav Ocean_sfc%frazil(:,:) = 0.0 ! time accumulated frazil (J/m^2) passed to ice model + Ocean_sfc%melt_potential(:,:) = 0.0 ! time accumulated melt potential (J/m^2) passed to ice model + Ocean_sfc%OBLD(:,:) = 0.0 ! ocean boundary layer depth (m) Ocean_sfc%area(:,:) = 0.0 Ocean_sfc%axes = diag%axesT1%handles !diag axes to be used by coupler tracer flux diagnostics @@ -838,8 +829,7 @@ subroutine convert_state_to_ocean_type(sfc_state, Ocean_sfc, G, US, patm, press_ is = G%isc ; ie = G%iec ; js = G%jsc ; je = G%jec call pass_vector(sfc_state%u, sfc_state%v, G%Domain) - call mpp_get_compute_domain(Ocean_sfc%Domain, isc_bnd, iec_bnd, & - jsc_bnd, jec_bnd) + call get_domain_extent(Ocean_sfc%Domain, isc_bnd, iec_bnd, jsc_bnd, jec_bnd) if (present(patm)) then ! Check that the inidicies in patm are (isc_bnd:iec_bnd,jsc_bnd:jec_bnd). if (.not.present(press_to_z)) call MOM_error(FATAL, & @@ -887,6 +877,18 @@ subroutine convert_state_to_ocean_type(sfc_state, Ocean_sfc, G, US, patm, press_ enddo ; enddo endif + if (allocated(sfc_state%melt_potential)) then + do j=jsc_bnd,jec_bnd ; do i=isc_bnd,iec_bnd + Ocean_sfc%melt_potential(i,j) = US%Q_to_J_kg*US%RZ_to_kg_m2 * sfc_state%melt_potential(i+i0,j+j0) + enddo ; enddo + endif + + if (allocated(sfc_state%Hml)) then + do j=jsc_bnd,jec_bnd ; do i=isc_bnd,iec_bnd + Ocean_sfc%OBLD(i,j) = US%Z_to_m * sfc_state%Hml(i+i0,j+j0) + enddo ; enddo + endif + if (Ocean_sfc%stagger == AGRID) then do j=jsc_bnd,jec_bnd ; do i=isc_bnd,iec_bnd Ocean_sfc%u_surf(i,j) = G%mask2dT(i+i0,j+j0) * US%L_T_to_m_s * & @@ -1021,51 +1023,52 @@ subroutine ocean_model_data2D_get(OS, Ocean, name, array2D, isc, jsc) integer , intent(in) :: isc !< The starting i-index of array2D integer , intent(in) :: jsc !< The starting j-index of array2D - integer :: g_isc, g_iec, g_jsc, g_jec,g_isd, g_ied, g_jsd, g_jed, i, j + integer :: g_isc, g_iec, g_jsc, g_jec, g_isd, g_ied, g_jsd, g_jed, i, j if (.not.associated(OS)) return if (.not.OS%is_ocean_pe) return -! The problem is %areaT is on MOM domain but Ice_Ocean_Boundary%... is on mpp domain. -! We want to return the MOM data on the mpp (compute) domain -! Get MOM domain extents - call mpp_get_compute_domain(OS%grid%Domain%mpp_domain, g_isc, g_iec, g_jsc, g_jec) - call mpp_get_data_domain (OS%grid%Domain%mpp_domain, g_isd, g_ied, g_jsd, g_jed) + ! The problem is that %areaT is on MOM domain but Ice_Ocean_Boundary%... is on a haloless domain. + ! We want to return the MOM data on the haloless (compute) domain + call get_domain_extent(OS%grid%Domain, g_isc, g_iec, g_jsc, g_jec, g_isd, g_ied, g_jsd, g_jed) g_isc = g_isc-g_isd+1 ; g_iec = g_iec-g_isd+1 ; g_jsc = g_jsc-g_jsd+1 ; g_jec = g_jec-g_jsd+1 - select case(name) - case('area') - array2D(isc:,jsc:) = OS%US%L_to_m**2*OS%grid%areaT(g_isc:g_iec,g_jsc:g_jec) - case('mask') - array2D(isc:,jsc:) = OS%grid%mask2dT(g_isc:g_iec,g_jsc:g_jec) + case('area') + array2D(isc:,jsc:) = OS%US%L_to_m**2*OS%grid%areaT(g_isc:g_iec,g_jsc:g_jec) + case('mask') + array2D(isc:,jsc:) = OS%grid%mask2dT(g_isc:g_iec,g_jsc:g_jec) !OR same result ! do j=g_jsc,g_jec ; do i=g_isc,g_iec ! array2D(isc+i-g_isc,jsc+j-g_jsc) = OS%grid%mask2dT(i,j) ! 
enddo ; enddo - case('t_surf') - array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET - case('t_pme') - array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET - case('t_runoff') - array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET - case('t_calving') - array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET - case('btfHeat') - array2D(isc:,jsc:) = 0 - case('cos_rot') - array2D(isc:,jsc:) = OS%grid%cos_rot(g_isc:g_iec,g_jsc:g_jec) ! =1 - case('sin_rot') - array2D(isc:,jsc:) = OS%grid%sin_rot(g_isc:g_iec,g_jsc:g_jec) ! =0 - case('s_surf') - array2D(isc:,jsc:) = Ocean%s_surf(isc:,jsc:) - case('sea_lev') - array2D(isc:,jsc:) = Ocean%sea_lev(isc:,jsc:) - case('frazil') - array2D(isc:,jsc:) = Ocean%frazil(isc:,jsc:) - case default - call MOM_error(FATAL,'get_ocean_grid_data2D: unknown argument name='//name) + case('t_surf') + array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET + case('t_pme') + array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET + case('t_runoff') + array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET + case('t_calving') + array2D(isc:,jsc:) = Ocean%t_surf(isc:,jsc:)-CELSIUS_KELVIN_OFFSET + case('btfHeat') + array2D(isc:,jsc:) = 0 + case('cos_rot') + array2D(isc:,jsc:) = OS%grid%cos_rot(g_isc:g_iec,g_jsc:g_jec) ! =1 + case('sin_rot') + array2D(isc:,jsc:) = OS%grid%sin_rot(g_isc:g_iec,g_jsc:g_jec) ! =0 + case('s_surf') + array2D(isc:,jsc:) = Ocean%s_surf(isc:,jsc:) + case('sea_lev') + array2D(isc:,jsc:) = Ocean%sea_lev(isc:,jsc:) + case('frazil') + array2D(isc:,jsc:) = Ocean%frazil(isc:,jsc:) + case('melt_pot') + array2D(isc:,jsc:) = Ocean%melt_potential(isc:,jsc:) + case('obld') + array2D(isc:,jsc:) = Ocean%OBLD(isc:,jsc:) + case default + call MOM_error(FATAL,'get_ocean_grid_data2D: unknown argument name='//name) end select end subroutine ocean_model_data2D_get @@ -1100,16 +1103,16 @@ subroutine ocean_public_type_chksum(id, timestep, ocn) !! visible ocean surface fields. 
integer :: n, m, outunit - outunit = stdout() + outunit = stdout write(outunit,*) "BEGIN CHECKSUM(ocean_type):: ", id, timestep - write(outunit,100) 'ocean%t_surf ',mpp_chksum(ocn%t_surf ) - write(outunit,100) 'ocean%s_surf ',mpp_chksum(ocn%s_surf ) - write(outunit,100) 'ocean%u_surf ',mpp_chksum(ocn%u_surf ) - write(outunit,100) 'ocean%v_surf ',mpp_chksum(ocn%v_surf ) - write(outunit,100) 'ocean%sea_lev ',mpp_chksum(ocn%sea_lev) - write(outunit,100) 'ocean%frazil ',mpp_chksum(ocn%frazil ) - + write(outunit,100) 'ocean%t_surf ', field_chksum(ocn%t_surf ) + write(outunit,100) 'ocean%s_surf ', field_chksum(ocn%s_surf ) + write(outunit,100) 'ocean%u_surf ', field_chksum(ocn%u_surf ) + write(outunit,100) 'ocean%v_surf ', field_chksum(ocn%v_surf ) + write(outunit,100) 'ocean%sea_lev ', field_chksum(ocn%sea_lev) + write(outunit,100) 'ocean%frazil ', field_chksum(ocn%frazil ) + write(outunit,100) 'ocean%melt_potential ', field_chksum(ocn%melt_potential) call coupler_type_write_chksums(ocn%fields, outunit, 'ocean%') 100 FORMAT(" CHECKSUM::",A20," = ",Z20) @@ -1153,8 +1156,7 @@ subroutine ocean_model_get_UV_surf(OS, Ocean, name, array2D, isc, jsc) G => OS%grid is = G%isc ; ie = G%iec ; js = G%jsc ; je = G%jec - call mpp_get_compute_domain(Ocean%Domain, isc_bnd, iec_bnd, & - jsc_bnd, jec_bnd) + call get_domain_extent(Ocean%Domain, isc_bnd, iec_bnd, jsc_bnd, jec_bnd) i0 = is - isc_bnd ; j0 = js - jsc_bnd @@ -1182,7 +1184,7 @@ subroutine ocean_model_get_UV_surf(OS, Ocean, name, array2D, isc, jsc) 0.5*(sfc_state%v(i+i0,J+j0)+sfc_state%v(i+i0+1,J+j0)) enddo ; enddo case default - call MOM_error(FATAL,'ocean_model_get_UV_surf: unknown argument name='//name) + call MOM_error(FATAL,'ocean_model_get_UV_surf: unknown argument name='//name) end select end subroutine ocean_model_get_UV_surf diff --git a/config_src/ice_solo_driver/atmos_ocean_fluxes.F90 b/config_src/drivers/ice_solo_driver/atmos_ocean_fluxes.F90 similarity index 100% rename from config_src/ice_solo_driver/atmos_ocean_fluxes.F90 rename to config_src/drivers/ice_solo_driver/atmos_ocean_fluxes.F90 diff --git a/config_src/ice_solo_driver/ice_shelf_driver.F90 b/config_src/drivers/ice_solo_driver/ice_shelf_driver.F90 similarity index 94% rename from config_src/ice_solo_driver/ice_shelf_driver.F90 rename to config_src/drivers/ice_solo_driver/ice_shelf_driver.F90 index b1323a5485..959e4676d0 100644 --- a/config_src/ice_solo_driver/ice_shelf_driver.F90 +++ b/config_src/drivers/ice_solo_driver/ice_shelf_driver.F90 @@ -38,9 +38,9 @@ program Shelf_main use MOM_get_input, only : Get_MOM_Input, directories use MOM_grid, only : ocean_grid_type, MOM_grid_init, MOM_grid_end use MOM_hor_index, only : hor_index_type, hor_index_init - use MOM_io, only : MOM_io_init, file_exists, open_file, close_file + use MOM_io, only : MOM_io_init, file_exists, open_ASCII_file, close_file use MOM_io, only : check_nml_error, io_infra_init, io_infra_end - use MOM_io, only : APPEND_FILE, ASCII_FILE, READONLY_FILE, SINGLE_FILE + use MOM_io, only : APPEND_FILE, READONLY_FILE, SINGLE_FILE use MOM_open_boundary, only : ocean_OBC_type use MOM_restart, only : save_restart use MOM_string_functions,only : uppercase @@ -176,7 +176,7 @@ program Shelf_main if (file_exists('input.nml')) then ! Provide for namelist specification of the run length and calendar data. 
- call open_file(unit, 'input.nml', form=ASCII_FILE, action=READONLY_FILE) + call open_ASCII_file(unit, 'input.nml', action=READONLY_FILE) read(unit, ice_solo_nml, iostat=io_status) call close_file(unit) ierr = check_nml_error(io_status,'ice_solo_nml') @@ -185,17 +185,18 @@ program Shelf_main endif endif + call Get_MOM_Input(param_file, dirs) + ! Read ocean_solo restart, which can override settings from the namelist. if (file_exists(trim(dirs%restart_input_dir)//'ice_solo.res')) then - call open_file(unit,trim(dirs%restart_input_dir)//'ice_solo.res', & - form=ASCII_FILE,action=READONLY_FILE) + call open_ASCII_file(unit, trim(dirs%restart_input_dir)//'ice_solo.res', action=READONLY_FILE) read(unit,*) calendar_type read(unit,*) date_init read(unit,*) date call close_file(unit) else calendar = uppercase(calendar) - if (calendar(1:6) == 'JULIAN') then ; calendar_type = JULIAN + if (calendar(1:6) == 'JULIAN') then ; calendar_type = JULIAN elseif (calendar(1:9) == 'GREGORIAN') then ; calendar_type = GREGORIAN elseif (calendar(1:6) == 'NOLEAP') then ; calendar_type = NOLEAP elseif (calendar(1:10)=='THIRTY_DAY') then ; calendar_type = THIRTY_DAY_MONTHS @@ -216,7 +217,6 @@ program Shelf_main Start_time = real_to_time(0.0) endif - call Get_MOM_Input(param_file, dirs) ! Determining the internal unit scaling factors for this run. call unit_scaling_init(param_file, US) @@ -341,15 +341,14 @@ program Shelf_main call diag_mediator_close_registration(diag) ! Write out a time stamp file. - if (calendar_type /= NO_CALENDAR) then - call open_file(unit, 'time_stamp.out', form=ASCII_FILE, action=APPEND_FILE, & - threading=SINGLE_FILE) + if (is_root_pe() .and. (calendar_type /= NO_CALENDAR)) then + call open_ASCII_file(unit, 'time_stamp.out', action=APPEND_FILE) call get_date(Time, date(1), date(2), date(3), date(4), date(5), date(6)) month = month_name(date(2)) - if (is_root_pe()) write(unit,'(6i4,2x,a3)') date, month(1:3) + write(unit,'(6i4,2x,a3)') date, month(1:3) call get_date(Time_end, date(1), date(2), date(3), date(4), date(5), date(6)) month = month_name(date(2)) - if (is_root_pe()) write(unit,'(6i4,2x,a3)') date, month(1:3) + write(unit,'(6i4,2x,a3)') date, month(1:3) call close_file(unit) endif @@ -428,19 +427,19 @@ program Shelf_main dirs%restart_output_dir) ! Write ice shelf solo restart file. - call open_file(unit, trim(dirs%restart_output_dir)//'shelf.res', nohdrs=.true.) 
if (is_root_pe())then - write(unit, '(i6,8x,a)') calendar_type, & - '(Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)' - - call get_date(Start_time, yr, mon, day, hr, mins, sec) - write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & - 'Model start time: year, month, day, hour, minute, second' - call get_date(Time, yr, mon, day, hr, mins, sec) - write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & - 'Current model time: year, month, day, hour, minute, second' + call open_ASCII_file(unit, trim(dirs%restart_output_dir)//'shelf.res') + write(unit, '(i6,8x,a)') calendar_type, & + '(Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)' + + call get_date(Start_time, yr, mon, day, hr, mins, sec) + write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & + 'Model start time: year, month, day, hour, minute, second' + call get_date(Time, yr, mon, day, hr, mins, sec) + write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & + 'Current model time: year, month, day, hour, minute, second' + call close_file(unit) endif - call close_file(unit) endif if (is_root_pe()) then diff --git a/config_src/mct_driver/mom_ocean_model_mct.F90 b/config_src/drivers/mct_cap/mom_ocean_model_mct.F90 similarity index 100% rename from config_src/mct_driver/mom_ocean_model_mct.F90 rename to config_src/drivers/mct_cap/mom_ocean_model_mct.F90 diff --git a/config_src/mct_driver/mom_surface_forcing_mct.F90 b/config_src/drivers/mct_cap/mom_surface_forcing_mct.F90 similarity index 99% rename from config_src/mct_driver/mom_surface_forcing_mct.F90 rename to config_src/drivers/mct_cap/mom_surface_forcing_mct.F90 index ef0527dd1c..e675170575 100644 --- a/config_src/mct_driver/mom_surface_forcing_mct.F90 +++ b/config_src/drivers/mct_cap/mom_surface_forcing_mct.F90 @@ -496,7 +496,8 @@ subroutine convert_IOB_to_fluxes(IOB, fluxes, index_bounds, Time, valid_time, G, if (associated(fluxes%frunoff)) then fluxes%latent(i,j) = fluxes%latent(i,j) - & IOB%rofi_flux(i-i0,j-j0)*US%W_m2_to_QRZ_T*CS%latent_heat_fusion - fluxes%latent_frunoff_diag(i,j) = - G%mask2dT(i,j) * IOB%rofi_flux(i-i0,j-j0)*US%W_m2_to_QRZ_T*CS%latent_heat_fusion + fluxes%latent_frunoff_diag(i,j) = -G%mask2dT(i,j) & + * IOB%rofi_flux(i-i0,j-j0)*US%W_m2_to_QRZ_T*CS%latent_heat_fusion endif ! 
contribution from evaporation if (associated(IOB%q_flux)) then diff --git a/config_src/mct_driver/ocn_cap_methods.F90 b/config_src/drivers/mct_cap/ocn_cap_methods.F90 similarity index 100% rename from config_src/mct_driver/ocn_cap_methods.F90 rename to config_src/drivers/mct_cap/ocn_cap_methods.F90 diff --git a/config_src/mct_driver/ocn_comp_mct.F90 b/config_src/drivers/mct_cap/ocn_comp_mct.F90 similarity index 100% rename from config_src/mct_driver/ocn_comp_mct.F90 rename to config_src/drivers/mct_cap/ocn_comp_mct.F90 diff --git a/config_src/mct_driver/ocn_cpl_indices.F90 b/config_src/drivers/mct_cap/ocn_cpl_indices.F90 similarity index 100% rename from config_src/mct_driver/ocn_cpl_indices.F90 rename to config_src/drivers/mct_cap/ocn_cpl_indices.F90 diff --git a/config_src/nuopc_driver/mom_cap.F90 b/config_src/drivers/nuopc_cap/mom_cap.F90 similarity index 94% rename from config_src/nuopc_driver/mom_cap.F90 rename to config_src/drivers/nuopc_cap/mom_cap.F90 index d1e44fe90b..e6bb0c20d4 100644 --- a/config_src/nuopc_driver/mom_cap.F90 +++ b/config_src/drivers/nuopc_cap/mom_cap.F90 @@ -35,22 +35,25 @@ module MOM_cap_mod use MOM_ocean_model_nuopc, only: ocean_model_init, update_ocean_model, ocean_model_end use MOM_ocean_model_nuopc, only: get_ocean_grid, get_eps_omesh use MOM_cap_time, only: AlarmInit -use MOM_cap_methods, only: mom_import, mom_export, mom_set_geomtype, state_diagnose +use MOM_cap_methods, only: mom_import, mom_export, mom_set_geomtype, mod2med_areacor +use MOM_cap_methods, only: med2mod_areacor, state_diagnose use MOM_cap_methods, only: ChkErr + #ifdef CESMCOUPLED use shr_file_mod, only: shr_file_setLogUnit, shr_file_getLogUnit +use shr_mpi_mod, only : shr_mpi_min, shr_mpi_max #endif use time_utils_mod, only: esmf2fms_time use, intrinsic :: iso_fortran_env, only: output_unit -use ESMF, only: ESMF_ClockAdvance, ESMF_ClockGet, ESMF_ClockPrint +use ESMF, only: ESMF_ClockAdvance, ESMF_ClockGet, ESMF_ClockPrint, ESMF_VMget use ESMF, only: ESMF_ClockGetAlarm, ESMF_ClockGetNextTime, ESMF_ClockAdvance use ESMF, only: ESMF_ClockSet, ESMF_Clock, ESMF_GeomType_Flag, ESMF_LOGMSG_INFO use ESMF, only: ESMF_Grid, ESMF_GridCreate, ESMF_GridAddCoord use ESMF, only: ESMF_GridGetCoord, ESMF_GridAddItem, ESMF_GridGetItem use ESMF, only: ESMF_GridComp, ESMF_GridCompSetEntryPoint, ESMF_GridCompGet -use ESMF, only: ESMF_LogFoundError, ESMF_LogWrite, ESMF_LogSetError +use ESMF, only: ESMF_LogWrite, ESMF_LogSetError use ESMF, only: ESMF_LOGERR_PASSTHRU, ESMF_KIND_R8, ESMF_RC_VAL_WRONG use ESMF, only: ESMF_GEOMTYPE_MESH, ESMF_GEOMTYPE_GRID, ESMF_SUCCESS use ESMF, only: ESMF_METHOD_INITIALIZE, ESMF_MethodRemove, ESMF_State @@ -69,9 +72,10 @@ module MOM_cap_mod use ESMF, only: ESMF_COORDSYS_SPH_DEG, ESMF_GridCreate, ESMF_INDEX_DELOCAL use ESMF, only: ESMF_MESHLOC_ELEMENT, ESMF_RC_VAL_OUTOFRANGE, ESMF_StateGet use ESMF, only: ESMF_TimePrint, ESMF_AlarmSet, ESMF_FieldGet, ESMF_Array +use ESMF, only: ESMF_FieldRegridGetArea use ESMF, only: ESMF_ArrayCreate use ESMF, only: ESMF_RC_FILE_OPEN, ESMF_RC_FILE_READ, ESMF_RC_FILE_WRITE -use ESMF, only: ESMF_VMBroadcast +use ESMF, only: ESMF_VMBroadcast, ESMF_VMReduce, ESMF_REDUCE_MAX, ESMF_REDUCE_MIN use ESMF, only: ESMF_AlarmCreate, ESMF_ClockGetAlarmList, ESMF_AlarmList_Flag use ESMF, only: ESMF_AlarmGet, ESMF_AlarmIsCreated, ESMF_ALARMLIST_ALL, ESMF_AlarmIsEnabled use ESMF, only: ESMF_STATEITEM_NOTFOUND, ESMF_FieldWrite @@ -93,7 +97,9 @@ module MOM_cap_mod use NUOPC_Model, only: model_label_SetRunClock => label_SetRunClock use NUOPC_Model, only: 
model_label_Finalize => label_Finalize use NUOPC_Model, only: SetVM + use get_stochy_pattern_mod, only: write_stoch_restart_ocn +!$use omp_lib , only : omp_set_num_threads implicit none; private @@ -137,11 +143,12 @@ module MOM_cap_mod logical :: profile_memory = .true. logical :: grid_attach_area = .false. logical :: use_coldstart = .true. -logical :: use_mommesh = .false. +logical :: use_mommesh = .true. character(len=128) :: scalar_field_name = '' integer :: scalar_field_count = 0 integer :: scalar_field_idx_grid_nx = 0 integer :: scalar_field_idx_grid_ny = 0 +integer :: nthrds !< number of openmp threads per task character(len=*),parameter :: u_FILE_u = & __FILE__ @@ -350,7 +357,7 @@ subroutine InitializeP0(gcomp, importState, exportState, clock, rc) write(logmsg,*) use_coldstart call ESMF_LogWrite('MOM_cap:use_coldstart = '//trim(logmsg), ESMF_LOGMSG_INFO) - use_mommesh = .false. + use_mommesh = .true. call NUOPC_CompAttributeGet(gcomp, name="use_mommesh", value=value, & isPresent=isPresent, isSet=isSet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -413,10 +420,12 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) character(len=512) :: diro character(len=512) :: logfile character(ESMF_MAXSTR) :: cvalue + character(len=64) :: logmsg logical :: isPresent, isPresentDiro, isPresentLogfile, isSet logical :: existflag integer :: userRc integer :: localPet + integer :: localPeCount integer :: iostat integer :: readunit character(len=512) :: restartfile ! Path/Name of restart file @@ -441,7 +450,7 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) call ESMF_VMGetCurrent(vm, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call ESMF_VMGet(VM, mpiCommunicator=mpi_comm_mom, rc=rc) + call ESMF_VMGet(VM, mpiCommunicator=mpi_comm_mom, localPet=localPet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return call ESMF_ClockGet(CLOCK, currTIME=MyTime, TimeStep=TINT, RC=rc) @@ -453,7 +462,30 @@ subroutine InitializeAdvertise(gcomp, importState, exportState, clock, rc) CALL ESMF_TimeIntervalGet(TINT, S=DT_OCEAN, RC=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - !TODO: next two lines not present in NCAR + !--------------------------------- + ! openmp threads + !--------------------------------- + + call ESMF_VMGet(vm, pet=localPet, peCount=localPeCount, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if(localPeCount == 1) then + call NUOPC_CompAttributeGet(gcomp, name="nthreads", value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. 
isSet) then + read(cvalue,*) nthrds + else + nthrds = localPeCount + endif + else + nthrds = localPeCount + endif + write(logmsg,*) nthrds + call ESMF_LogWrite(trim(subname)//': nthreads = '//trim(logmsg), ESMF_LOGMSG_INFO) + +!$ call omp_set_num_threads(nthrds) + call fms_init(mpi_comm_mom) call constants_init call field_manager_init @@ -800,6 +832,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) integer :: lbnd3,ubnd3,lbnd4,ubnd4 integer :: nblocks_tot logical :: found + logical :: isPresent, isSet integer(ESMF_KIND_I4), pointer :: dataPtr_mask(:,:) real(ESMF_KIND_R8), pointer :: dataPtr_area(:,:) real(ESMF_KIND_R8), pointer :: dataPtr_xcen(:,:) @@ -808,6 +841,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) real(ESMF_KIND_R8), pointer :: dataPtr_ycor(:,:) integer :: mpicom integer :: localPet + integer :: localPeCount integer :: lsize integer :: ig,jg, ni,nj,k integer, allocatable :: gindex(:) ! global index space @@ -815,16 +849,29 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) character(len=256) :: cvalue character(len=256) :: frmt ! format specifier for several error msgs character(len=512) :: err_msg ! error messages + integer :: spatialDim + integer :: numOwnedElements + type(ESMF_Array) :: elemMaskArray + real(ESMF_KIND_R8) , pointer :: ownedElemCoords(:) + real(ESMF_KIND_R8) , pointer :: lat(:), latMesh(:) + real(ESMF_KIND_R8) , pointer :: lon(:), lonMesh(:) + integer(ESMF_KIND_I4) , pointer :: mask(:), maskMesh(:) + real(ESMF_KIND_R8) :: diff_lon, diff_lat + real :: eps_omesh + real(ESMF_KIND_R8) :: L2_to_rad2 + type(ESMF_Field) :: lfield + real(ESMF_KIND_R8), allocatable :: mesh_areas(:) + real(ESMF_KIND_R8), allocatable :: model_areas(:) + real(ESMF_KIND_R8), pointer :: dataPtr_mesh_areas(:) + real(ESMF_KIND_R8) :: max_mod2med_areacor + real(ESMF_KIND_R8) :: max_med2mod_areacor + real(ESMF_KIND_R8) :: min_mod2med_areacor + real(ESMF_KIND_R8) :: min_med2mod_areacor + real(ESMF_KIND_R8) :: max_mod2med_areacor_glob + real(ESMF_KIND_R8) :: max_med2mod_areacor_glob + real(ESMF_KIND_R8) :: min_mod2med_areacor_glob + real(ESMF_KIND_R8) :: min_med2mod_areacor_glob character(len=*), parameter :: subname='(MOM_cap:InitializeRealize)' - integer :: spatialDim - integer :: numOwnedElements - type(ESMF_Array) :: elemMaskArray - real(ESMF_KIND_R8) , pointer :: ownedElemCoords(:) - real(ESMF_KIND_R8) , pointer :: lat(:), latMesh(:) - real(ESMF_KIND_R8) , pointer :: lon(:), lonMesh(:) - integer(ESMF_KIND_I4) , pointer :: mask(:), maskMesh(:) - real(ESMF_KIND_R8) :: diff_lon, diff_lat - real :: eps_omesh !-------------------------------- rc = ESMF_SUCCESS @@ -852,6 +899,28 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call ESMF_VMGet(vm, petCount=npet, mpiCommunicator=mpicom, localPet=localPet, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + !--------------------------------- + ! openmp threads + !--------------------------------- + + call ESMF_VMGet(vm, pet=localPet, peCount=localPeCount, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + if(localPeCount == 1) then + call NUOPC_CompAttributeGet(gcomp, name="nthreads", value=cvalue, & + isPresent=isPresent, isSet=isSet, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + if (isPresent .and. isSet) then + read(cvalue,*) nthrds + else + nthrds = localPeCount + endif + else + nthrds = localPeCount + endif + +!$ call omp_set_num_threads(nthrds) + !--------------------------------- ! 
global mom grid size !--------------------------------- @@ -993,10 +1062,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) end if end do - deallocate(ownedElemCoords) - deallocate(lonMesh , lon ) - deallocate(latMesh , lat ) - deallocate(maskMesh, mask) ! realize the import and export fields using the mesh call MOM_RealizeFields(importState, fldsToOcn_num, fldsToOcn, "Ocn import", mesh=Emesh, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return @@ -1004,6 +1069,69 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call MOM_RealizeFields(exportState, fldsFrOcn_num, fldsFrOcn, "Ocn export", mesh=Emesh, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return + !--------------------------------- + ! determine flux area correction factors - module variables in mom_cap_methods + !--------------------------------- + ! Area correction factors are ONLY valid for meshes that are read in - so do not need them for + ! grids that are calculated internally + + ! Determine mesh areas for regridding + call ESMF_MeshGet(Emesh, numOwnedElements=numOwnedElements, spatialDim=spatialDim, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + allocate (mod2med_areacor(numOwnedElements)) + allocate (med2mod_areacor(numOwnedElements)) + mod2med_areacor(:) = 1._ESMF_KIND_R8 + med2mod_areacor(:) = 1._ESMF_KIND_R8 + +#ifdef CESMCOUPLED + ! Determine model areas and flux correction factors (module variables in mom_) + call ESMF_StateGet(exportState, itemName=trim(fldsFrOcn(2)%stdname), field=lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldRegridGetArea(lfield, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + call ESMF_FieldGet(lfield, farrayPtr=dataPtr_mesh_areas, rc=rc) + if (ChkErr(rc,__LINE__,u_FILE_u)) return + + allocate(mesh_areas(numOwnedElements)) + allocate(model_areas(numOwnedElements)) + k = 0 + do j = ocean_grid%jsc, ocean_grid%jec + do i = ocean_grid%isc, ocean_grid%iec + k = k + 1 ! Increment position within gindex + if (mask(k) /= 0) then + mesh_areas(k) = dataPtr_mesh_areas(k) + model_areas(k) = ocean_grid%AreaT(i,j) / ocean_grid%Rad_Earth**2 + mod2med_areacor(k) = model_areas(k) / mesh_areas(k) + med2mod_areacor(k) = mesh_areas(k) / model_areas(k) + end if + end do + end do + deallocate(mesh_areas) + deallocate(model_areas) + + ! 
Write diagnostic output for correction factors + min_mod2med_areacor = minval(mod2med_areacor) + max_mod2med_areacor = maxval(mod2med_areacor) + min_med2mod_areacor = minval(med2mod_areacor) + max_med2mod_areacor = maxval(med2mod_areacor) + call shr_mpi_max(max_mod2med_areacor, max_mod2med_areacor_glob, mpicom) + call shr_mpi_min(min_mod2med_areacor, min_mod2med_areacor_glob, mpicom) + call shr_mpi_max(max_med2mod_areacor, max_med2mod_areacor_glob, mpicom) + call shr_mpi_min(min_med2mod_areacor, min_med2mod_areacor_glob, mpicom) + if (localPet == 0) then + write(logunit,'(2A,2g23.15,A )') trim(subname),' : min_mod2med_areacor, max_mod2med_areacor ',& + min_mod2med_areacor_glob, max_mod2med_areacor_glob, 'MOM6' + write(logunit,'(2A,2g23.15,A )') trim(subname),' : min_med2mod_areacor, max_med2mod_areacor ',& + min_med2mod_areacor_glob, max_med2mod_areacor_glob, 'MOM6' + end if +#endif + + deallocate(ownedElemCoords) + deallocate(lonMesh , lon ) + deallocate(latMesh , lat ) + deallocate(maskMesh, mask) + else if (geomtype == ESMF_GEOMTYPE_GRID) then !--------------------------------- @@ -1230,7 +1358,6 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) call MOM_RealizeFields(exportState, fldsFrOcn_num, fldsFrOcn, "Ocn export", grid=gridOut, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - endif !--------------------------------- @@ -1408,6 +1535,8 @@ subroutine ModelAdvance(gcomp, rc) call shr_file_setLogUnit (logunit) +!$ call omp_set_num_threads(nthrds) + ! query the Component for its clock, importState and exportState call ESMF_GridCompGet(gcomp, clock=clock, importState=importState, & exportState=exportState, rc=rc) diff --git a/config_src/nuopc_driver/mom_cap_methods.F90 b/config_src/drivers/nuopc_cap/mom_cap_methods.F90 similarity index 91% rename from config_src/nuopc_driver/mom_cap_methods.F90 rename to config_src/drivers/nuopc_cap/mom_cap_methods.F90 index 78014f1c63..0a4e12c647 100644 --- a/config_src/nuopc_driver/mom_cap_methods.F90 +++ b/config_src/drivers/nuopc_cap/mom_cap_methods.F90 @@ -46,8 +46,13 @@ module MOM_cap_methods type(ESMF_GeomType_Flag) :: geomtype !< ESMF type describing type of !! geometry (mesh or grid) -character(len=*),parameter :: u_FILE_u = & - __FILE__ +! area correction factors for fluxes sent and received from the mediator +! these factors are ONLY valid for meshes that are read in - so do not need them for +! grids that are calculated internally + +real(ESMF_KIND_R8), public, allocatable :: mod2med_areacor(:) ! ratios of model areas to input mesh areas +real(ESMF_KIND_R8), public, allocatable :: med2mod_areacor(:) ! ratios of input mesh areas to model areas +character(len=*),parameter :: u_FILE_u = __FILE__ contains @@ -100,35 +105,35 @@ subroutine mom_import(ocean_public, ocean_grid, importState, ice_ocean_boundary, ! near-IR, direct shortwave (W/m2) !---- call state_getimport(importState, 'mean_net_sw_ir_dir_flx', & - isc, iec, jsc, jec, ice_ocean_boundary%sw_flux_nir_dir, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%sw_flux_nir_dir, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return !---- ! near-IR, diffuse shortwave (W/m2) !---- call state_getimport(importState, 'mean_net_sw_ir_dif_flx', & - isc, iec, jsc, jec, ice_ocean_boundary%sw_flux_nir_dif, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%sw_flux_nir_dif, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return !---- !
visible, direct shortwave (W/m2) !---- call state_getimport(importState, 'mean_net_sw_vis_dir_flx', & - isc, iec, jsc, jec, ice_ocean_boundary%sw_flux_vis_dir, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%sw_flux_vis_dir, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return !---- ! visible, diffuse shortwave (W/m2) !---- call state_getimport(importState, 'mean_net_sw_vis_dif_flx', & - isc, iec, jsc, jec, ice_ocean_boundary%sw_flux_vis_dif, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%sw_flux_vis_dif, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! ------- ! Net longwave radiation (W/m2) ! ------- call state_getimport(importState, 'mean_net_lw_flx', & - isc, iec, jsc, jec, ice_ocean_boundary%lw_flux, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%lw_flux, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return !---- @@ -137,12 +142,13 @@ subroutine mom_import(ocean_public, ocean_grid, importState, ice_ocean_boundary, allocate (taux(isc:iec,jsc:jec)) allocate (tauy(isc:iec,jsc:jec)) - call state_getimport(importState, 'mean_zonal_moment_flx', isc, iec, jsc, jec, taux, rc=rc) + call state_getimport(importState, 'mean_zonal_moment_flx', isc, iec, jsc, jec, taux, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - call state_getimport(importState, 'mean_merid_moment_flx', isc, iec, jsc, jec, tauy, rc=rc) + call state_getimport(importState, 'mean_merid_moment_flx', isc, iec, jsc, jec, tauy, & + areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! rotate taux and tauy from true zonal/meridional to local coordinates do j = jsc, jec jg = j + ocean_grid%jsc - jsc @@ -161,28 +167,28 @@ subroutine mom_import(ocean_public, ocean_grid, importState, ice_ocean_boundary, ! sensible heat flux (W/m2) !---- call state_getimport(importState, 'mean_sensi_heat_flx', & - isc, iec, jsc, jec, ice_ocean_boundary%t_flux, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%t_flux, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return !---- ! evaporation flux (W/m2) !---- call state_getimport(importState, 'mean_evap_rate', & - isc, iec, jsc, jec, ice_ocean_boundary%q_flux, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%q_flux, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return !---- ! liquid precipitation (rain) !---- call state_getimport(importState, 'mean_prec_rate', & - isc, iec, jsc, jec, ice_ocean_boundary%lprec, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%lprec, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return !---- ! frozen precipitation (snow) !---- call state_getimport(importState, 'mean_fprec_rate', & - isc, iec, jsc, jec, ice_ocean_boundary%fprec, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%fprec, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return !---- @@ -194,25 +200,25 @@ subroutine mom_import(ocean_public, ocean_grid, importState, ice_ocean_boundary, ! liquid runoff ice_ocean_boundary%lrunoff (:,:) = 0._ESMF_KIND_R8 call state_getimport(importState, 'Foxx_rofl', & - isc, iec, jsc, jec, ice_ocean_boundary%lrunoff,rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%lrunoff, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! 
ice runoff ice_ocean_boundary%frunoff (:,:) = 0._ESMF_KIND_R8 call state_getimport(importState, 'Foxx_rofi', & - isc, iec, jsc, jec, ice_ocean_boundary%frunoff,rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%frunoff, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! heat content of lrunoff ice_ocean_boundary%lrunoff_hflx(:,:) = 0._ESMF_KIND_R8 call state_getimport(importState, 'mean_runoff_heat_flx', & - isc, iec, jsc, jec, ice_ocean_boundary%lrunoff_hflx, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%lrunoff_hflx, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return ! heat content of frunoff ice_ocean_boundary%frunoff_hflx(:,:) = 0._ESMF_KIND_R8 call state_getimport(importState, 'mean_calving_heat_flx', & - isc, iec, jsc, jec, ice_ocean_boundary%frunoff_hflx, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%frunoff_hflx, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return !---- @@ -220,23 +226,23 @@ subroutine mom_import(ocean_public, ocean_grid, importState, ice_ocean_boundary, !---- ice_ocean_boundary%salt_flux(:,:) = 0._ESMF_KIND_R8 call state_getimport(importState, 'mean_salt_rate', & - isc, iec, jsc, jec, ice_ocean_boundary%salt_flux,rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%salt_flux, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! !---- - ! ! snow&ice melt heat flux (W/m^2) - ! !---- + !---- + ! snow&ice melt heat flux (W/m^2) + !---- ice_ocean_boundary%seaice_melt_heat(:,:) = 0._ESMF_KIND_R8 call state_getimport(importState, 'net_heat_flx_to_ocn', & - isc, iec, jsc, jec, ice_ocean_boundary%seaice_melt_heat,rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%seaice_melt_heat, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! !---- - ! ! snow&ice melt water flux (W/m^2) - ! !---- + !---- + ! snow&ice melt water flux (W/m^2) + !---- ice_ocean_boundary%seaice_melt(:,:) = 0._ESMF_KIND_R8 call state_getimport(importState, 'mean_fresh_water_to_ocean_rate', & - isc, iec, jsc, jec, ice_ocean_boundary%seaice_melt,rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%seaice_melt, areacor=med2mod_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return !---- @@ -247,10 +253,9 @@ subroutine mom_import(ocean_public, ocean_grid, importState, ice_ocean_boundary, ice_ocean_boundary%mi(:,:) = 0._ESMF_KIND_R8 call state_getimport(importState, 'mass_of_overlying_ice', & - isc, iec, jsc, jec, ice_ocean_boundary%mi, rc=rc) + isc, iec, jsc, jec, ice_ocean_boundary%mi,rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - !---- ! 
Partitioned Stokes Drift Components !---- @@ -451,7 +456,7 @@ subroutine mom_export(ocean_public, ocean_grid, ocean_state, exportState, clock, enddo call State_SetExport(exportState, 'freezing_melting_potential', & - isc, iec, jsc, jec, melt_potential, ocean_grid, rc=rc) + isc, iec, jsc, jec, melt_potential, ocean_grid, areacor=mod2med_areacor, rc=rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return deallocate(melt_potential) @@ -619,7 +624,7 @@ subroutine State_GetFldPtr_2d(State, fldname, fldptr, rc) end subroutine State_GetFldPtr_2d !> Map import state field to output array -subroutine State_GetImport(state, fldname, isc, iec, jsc, jec, output, do_sum, rc) +subroutine State_GetImport(state, fldname, isc, iec, jsc, jec, output, do_sum, areacor, rc) type(ESMF_State) , intent(in) :: state !< ESMF state character(len=*) , intent(in) :: fldname !< Field name integer , intent(in) :: isc !< The start i-index of cell centers within @@ -632,6 +637,8 @@ subroutine State_GetImport(state, fldname, isc, iec, jsc, jec, output, do_sum, r !! the computational domain real (ESMF_KIND_R8) , intent(inout) :: output(isc:iec,jsc:jec)!< Output 2D array logical, optional , intent(in) :: do_sum !< If true, sums the data + real (ESMF_KIND_R8), optional, intent(in) :: areacor(:) !< flux area correction factors + !! applicable to meshes integer , intent(out) :: rc !< Return code ! local variables @@ -654,15 +661,23 @@ subroutine State_GetImport(state, fldname, isc, iec, jsc, jec, output, do_sum, r call state_getfldptr(state, trim(fldname), dataptr1d, rc) if (ChkErr(rc,__LINE__,u_FILE_u)) return - ! determine output array + ! determine output array and apply area correction if present n = 0 do j = jsc,jec do i = isc,iec n = n + 1 if (present(do_sum)) then - output(i,j) = output(i,j) + dataPtr1d(n) + if (present(areacor)) then + output(i,j) = output(i,j) + dataPtr1d(n) * areacor(n) + else + output(i,j) = output(i,j) + dataPtr1d(n) + end if else - output(i,j) = dataPtr1d(n) + if (present(areacor)) then + output(i,j) = dataPtr1d(n) * areacor(n) + else + output(i,j) = dataPtr1d(n) + end if endif enddo enddo @@ -694,7 +709,7 @@ subroutine State_GetImport(state, fldname, isc, iec, jsc, jec, output, do_sum, r end subroutine State_GetImport !> Map input array to export state -subroutine State_SetExport(state, fldname, isc, iec, jsc, jec, input, ocean_grid, rc) +subroutine State_SetExport(state, fldname, isc, iec, jsc, jec, input, ocean_grid, areacor, rc) type(ESMF_State) , intent(inout) :: state !< ESMF state character(len=*) , intent(in) :: fldname !< Field name integer , intent(in) :: isc !< The start i-index of cell centers within @@ -707,6 +722,8 @@ subroutine State_SetExport(state, fldname, isc, iec, jsc, jec, input, ocean_grid !! the computational domain real (ESMF_KIND_R8) , intent(in) :: input(isc:iec,jsc:jec)!< Input 2D array type(ocean_grid_type) , intent(in) :: ocean_grid !< Ocean horizontal grid + real (ESMF_KIND_R8), optional, intent(in) :: areacor(:) !< flux area correction factors + !! applicable to meshes integer , intent(out) :: rc !< Return code ! 
local variables @@ -741,6 +758,11 @@ subroutine State_SetExport(state, fldname, isc, iec, jsc, jec, input, ocean_grid dataPtr1d(n) = input(i,j) * ocean_grid%mask2dT(ig,jg) enddo enddo + if (present(areacor)) then + do n = 1,(size(dataPtr1d)) + dataPtr1d(n) = dataPtr1d(n) * areacor(n) + enddo + end if else if (geomtype == ESMF_GEOMTYPE_GRID) then diff --git a/config_src/nuopc_driver/mom_cap_time.F90 b/config_src/drivers/nuopc_cap/mom_cap_time.F90 similarity index 100% rename from config_src/nuopc_driver/mom_cap_time.F90 rename to config_src/drivers/nuopc_cap/mom_cap_time.F90 diff --git a/config_src/nuopc_driver/mom_ocean_model_nuopc.F90 b/config_src/drivers/nuopc_cap/mom_ocean_model_nuopc.F90 similarity index 100% rename from config_src/nuopc_driver/mom_ocean_model_nuopc.F90 rename to config_src/drivers/nuopc_cap/mom_ocean_model_nuopc.F90 diff --git a/config_src/nuopc_driver/mom_surface_forcing_nuopc.F90 b/config_src/drivers/nuopc_cap/mom_surface_forcing_nuopc.F90 similarity index 100% rename from config_src/nuopc_driver/mom_surface_forcing_nuopc.F90 rename to config_src/drivers/nuopc_cap/mom_surface_forcing_nuopc.F90 diff --git a/config_src/nuopc_driver/ocn_comp_NUOPC.F90 b/config_src/drivers/nuopc_cap/ocn_comp_NUOPC.F90 similarity index 100% rename from config_src/nuopc_driver/ocn_comp_NUOPC.F90 rename to config_src/drivers/nuopc_cap/ocn_comp_NUOPC.F90 diff --git a/config_src/nuopc_driver/time_utils.F90 b/config_src/drivers/nuopc_cap/time_utils.F90 similarity index 100% rename from config_src/nuopc_driver/time_utils.F90 rename to config_src/drivers/nuopc_cap/time_utils.F90 diff --git a/config_src/solo_driver/MESO_surface_forcing.F90 b/config_src/drivers/solo_driver/MESO_surface_forcing.F90 similarity index 99% rename from config_src/solo_driver/MESO_surface_forcing.F90 rename to config_src/drivers/solo_driver/MESO_surface_forcing.F90 index e2f0694b6c..679f147797 100644 --- a/config_src/solo_driver/MESO_surface_forcing.F90 +++ b/config_src/drivers/solo_driver/MESO_surface_forcing.F90 @@ -274,7 +274,7 @@ subroutine MESO_surface_forcing_init(Time, G, US, param_file, diag, CS) call get_param(param_file, mdl, "INPUTDIR", CS%inputdir, default=".") CS%inputdir = slasher(CS%inputdir) - endif + endif end subroutine MESO_surface_forcing_init diff --git a/config_src/solo_driver/MOM_driver.F90 b/config_src/drivers/solo_driver/MOM_driver.F90 similarity index 88% rename from config_src/solo_driver/MOM_driver.F90 rename to config_src/drivers/solo_driver/MOM_driver.F90 index ba52d9c02a..8edad7fa05 100644 --- a/config_src/solo_driver/MOM_driver.F90 +++ b/config_src/drivers/solo_driver/MOM_driver.F90 @@ -32,49 +32,43 @@ program MOM_main use MOM, only : extract_surface_state, finish_MOM_initialization use MOM, only : get_MOM_state_elements, MOM_state_is_synchronized use MOM, only : step_offline - use MOM_domains, only : MOM_infra_init, MOM_infra_end + use MOM_coms, only : Set_PElist + use MOM_domains, only : MOM_infra_init, MOM_infra_end, set_MOM_thread_affinity + use MOM_ensemble_manager, only : ensemble_manager_init, get_ensemble_size + use MOM_ensemble_manager, only : ensemble_pelist_setup use MOM_error_handler, only : MOM_error, MOM_mesg, WARNING, FATAL, is_root_pe use MOM_error_handler, only : callTree_enter, callTree_leave, callTree_waypoint use MOM_file_parser, only : read_param, get_param, log_param, log_version, param_file_type use MOM_file_parser, only : close_param_file use MOM_forcing_type, only : forcing, mech_forcing, forcing_diagnostics use MOM_forcing_type, only : mech_forcing_diags, 
MOM_forcing_chksum, MOM_mech_forcing_chksum - use MOM_get_input, only : directories + use MOM_get_input, only : get_MOM_input, directories use MOM_grid, only : ocean_grid_type - use MOM_io, only : file_exists, open_file, close_file + use MOM_ice_shelf, only : initialize_ice_shelf, ice_shelf_end, ice_shelf_CS + use MOM_ice_shelf, only : shelf_calc_flux, add_shelf_forces, ice_shelf_save_restart + use MOM_ice_shelf, only : initialize_ice_shelf_fluxes, initialize_ice_shelf_forces + use MOM_interpolate, only : time_interp_external_init + use MOM_io, only : file_exists, open_ASCII_file, close_file use MOM_io, only : check_nml_error, io_infra_init, io_infra_end - use MOM_io, only : APPEND_FILE, ASCII_FILE, READONLY_FILE, SINGLE_FILE + use MOM_io, only : APPEND_FILE, READONLY_FILE use MOM_restart, only : MOM_restart_CS, save_restart use MOM_string_functions,only : uppercase use MOM_surface_forcing, only : set_forcing, forcing_save_restart use MOM_surface_forcing, only : surface_forcing_init, surface_forcing_CS - use MOM_time_manager, only : time_type, set_date, get_date - use MOM_time_manager, only : real_to_time, time_type_to_real + use MOM_time_manager, only : time_type, set_date, get_date, real_to_time, time_type_to_real use MOM_time_manager, only : operator(+), operator(-), operator(*), operator(/) use MOM_time_manager, only : operator(>), operator(<), operator(>=) use MOM_time_manager, only : increment_date, set_calendar_type, month_name - use MOM_time_manager, only : JULIAN, GREGORIAN, NOLEAP, THIRTY_DAY_MONTHS - use MOM_time_manager, only : NO_CALENDAR + use MOM_time_manager, only : JULIAN, GREGORIAN, NOLEAP, THIRTY_DAY_MONTHS, NO_CALENDAR use MOM_tracer_flow_control, only : tracer_flow_control_CS use MOM_unit_scaling, only : unit_scale_type use MOM_variables, only : surface use MOM_verticalGrid, only : verticalGrid_type + use MOM_wave_interface, only : wave_parameters_CS, MOM_wave_interface_init + use MOM_wave_interface, only : MOM_wave_interface_init_lite, Update_Surface_Waves use MOM_write_cputime, only : write_cputime, MOM_write_cputime_init use MOM_write_cputime, only : write_cputime_start_clock, write_cputime_CS - use ensemble_manager_mod, only : ensemble_manager_init, get_ensemble_size - use ensemble_manager_mod, only : ensemble_pelist_setup - use mpp_mod, only : set_current_pelist => mpp_set_current_pelist - use time_interp_external_mod, only : time_interp_external_init - use fms_affinity_mod, only : fms_affinity_init, fms_affinity_set,fms_affinity_get - - use MOM_ice_shelf, only : initialize_ice_shelf, ice_shelf_end, ice_shelf_CS - use MOM_ice_shelf, only : shelf_calc_flux, add_shelf_forces, ice_shelf_save_restart -! , add_shelf_flux_forcing, add_shelf_flux_IOB - - use MOM_wave_interface, only: wave_parameters_CS, MOM_wave_interface_init - use MOM_wave_interface, only: MOM_wave_interface_init_lite, Update_Surface_Waves - implicit none #include @@ -84,18 +78,17 @@ program MOM_main ! A structure containing pointers to the thermodynamic forcing fields ! at the ocean surface. type(forcing) :: fluxes - ! A structure containing pointers to the ocean surface state fields. type(surface) :: sfc_state ! A pointer to a structure containing metrics and related information. - type(ocean_grid_type), pointer :: grid - type(verticalGrid_type), pointer :: GV + type(ocean_grid_type), pointer :: grid => NULL() + type(verticalGrid_type), pointer :: GV => NULL() ! A pointer to a structure containing dimensional unit scaling factors. 
- type(unit_scale_type), pointer :: US + type(unit_scale_type), pointer :: US => NULL() ! If .true., use the ice shelf model for part of the domain. - logical :: use_ice_shelf + logical :: use_ice_shelf = .false. ! If .true., use surface wave coupling logical :: use_waves = .false. @@ -198,8 +191,8 @@ program MOM_main type(MOM_restart_CS), pointer :: & restart_CSp => NULL() !< A pointer to the restart control structure !! that will be used for MOM restart files. - type(diag_ctrl), pointer :: & - diag => NULL() !< A pointer to the diagnostic regulatory structure + type(diag_ctrl), pointer :: & + diag => NULL() !< A pointer to the diagnostic regulatory structure !----------------------------------------------------------------------- character(len=4), parameter :: vers_num = 'v2.0' @@ -219,6 +212,8 @@ program MOM_main call MOM_infra_init() ; call io_infra_init() + !allocate(forces,fluxes,sfc_state) + ! Initialize the ensemble manager. If there are no settings for ensemble_size ! in input.nml(ensemble.nml), these should not do anything. In coupled ! configurations, this all occurs in the external driver. @@ -228,7 +223,7 @@ program MOM_main allocate(ocean_pelist(nPEs_per)) call ensemble_pelist_setup(.true., 0, nPEs_per, 0, 0, atm_pelist, ocean_pelist, & land_pelist, ice_pelist) - call set_current_pelist(ocean_pelist) + call Set_PElist(ocean_pelist) deallocate(ocean_pelist) endif @@ -243,7 +238,7 @@ program MOM_main if (file_exists('input.nml')) then ! Provide for namelist specification of the run length and calendar data. - call open_file(unit, 'input.nml', form=ASCII_FILE, action=READONLY_FILE) + call open_ASCII_file(unit, 'input.nml', action=READONLY_FILE) read(unit, ocean_solo_nml, iostat=io_status) call close_file(unit) ierr = check_nml_error(io_status,'ocean_solo_nml') @@ -252,25 +247,23 @@ program MOM_main endif endif -!$ call fms_affinity_init -!$ call fms_affinity_set('OCEAN', use_hyper_thread, ocean_nthreads) -!$ call omp_set_num_threads(ocean_nthreads) -!$OMP PARALLEL -!$ write(6,*) "ocean_solo OMPthreading ", fms_affinity_get(), omp_get_thread_num(), omp_get_num_threads() -!$ call flush(6) -!$OMP END PARALLEL + ! This call sets the number and affinity of threads with openMP. + !$ call set_MOM_thread_affinity(ocean_nthreads, use_hyper_thread) + + ! This call is required to initiate dirs%restart_input_dir for ocean_solo.res + ! The contents of dirs will be reread in initialize_MOM. + call get_MOM_input(dirs=dirs) ! Read ocean_solo restart, which can override settings from the namelist. 
if (file_exists(trim(dirs%restart_input_dir)//'ocean_solo.res')) then - call open_file(unit,trim(dirs%restart_input_dir)//'ocean_solo.res', & - form=ASCII_FILE,action=READONLY_FILE) + call open_ASCII_file(unit, trim(dirs%restart_input_dir)//'ocean_solo.res', action=READONLY_FILE) read(unit,*) calendar_type read(unit,*) date_init read(unit,*) date call close_file(unit) else calendar = uppercase(calendar) - if (calendar(1:6) == 'JULIAN') then ; calendar_type = JULIAN + if (calendar(1:6) == 'JULIAN') then ; calendar_type = JULIAN elseif (calendar(1:9) == 'GREGORIAN') then ; calendar_type = GREGORIAN elseif (calendar(1:6) == 'NOLEAP') then ; calendar_type = NOLEAP elseif (calendar(1:10)=='THIRTY_DAY') then ; calendar_type = THIRTY_DAY_MONTHS @@ -285,32 +278,47 @@ program MOM_main if (sum(date_init) > 0) then - Start_time = set_date(date_init(1),date_init(2), date_init(3), & - date_init(4),date_init(5),date_init(6)) + Start_time = set_date(date_init(1), date_init(2), date_init(3), & + date_init(4), date_init(5), date_init(6)) else Start_time = real_to_time(0.0) endif - call time_interp_external_init + call time_interp_external_init() if (sum(date) >= 0) then ! In this case, the segment starts at a time fixed by ocean_solo.res - segment_start_time = set_date(date(1),date(2),date(3),date(4),date(5),date(6)) + segment_start_time = set_date(date(1), date(2), date(3), date(4), date(5), date(6)) Time = segment_start_time - call initialize_MOM(Time, Start_time, param_file, dirs, MOM_CSp, restart_CSp, & - segment_start_time, offline_tracer_mode=offline_tracer_mode, & - diag_ptr=diag, tracer_flow_CSp=tracer_flow_CSp) else ! In this case, the segment starts at a time read from the MOM restart file ! or left as Start_time by MOM_initialize. Time = Start_time + endif + + ! Call initialize MOM with an optional Ice Shelf CS which, if present triggers + ! initialization of ice shelf parameters and arrays. + if (sum(date) >= 0) then + call initialize_MOM(Time, Start_time, param_file, dirs, MOM_CSp, restart_CSp, & + segment_start_time, offline_tracer_mode=offline_tracer_mode, & + diag_ptr=diag, tracer_flow_CSp=tracer_flow_CSp, ice_shelf_CSp=ice_shelf_CSp) + else call initialize_MOM(Time, Start_time, param_file, dirs, MOM_CSp, restart_CSp, & offline_tracer_mode=offline_tracer_mode, diag_ptr=diag, & - tracer_flow_CSp=tracer_flow_CSp) + tracer_flow_CSp=tracer_flow_CSp, ice_shelf_CSp=ice_shelf_CSp) endif call get_MOM_state_elements(MOM_CSp, G=grid, GV=GV, US=US, C_p_scaled=fluxes%C_p) Master_Time = Time + use_ice_shelf = associated(ice_shelf_CSp) + + if (use_ice_shelf) then + ! These arrays are not initialized in most solo cases, but are needed + ! when using an ice shelf + call initialize_ice_shelf_fluxes(ice_shelf_CSp, grid, US, fluxes) + call initialize_ice_shelf_forces(ice_shelf_CSp, grid, US, forces) + endif + call callTree_waypoint("done initialize_MOM") @@ -320,16 +328,8 @@ program MOM_main surface_forcing_CSp, tracer_flow_CSp) call callTree_waypoint("done surface_forcing_init") - call get_param(param_file, mod_name, "ICE_SHELF", use_ice_shelf, & - "If true, enables the ice shelf model.", default=.false.) - if (use_ice_shelf) then - ! These arrays are not initialized in most solo cases, but are needed - ! when using an ice shelf - call initialize_ice_shelf(param_file, grid, Time, ice_shelf_CSp, & - diag, forces, fluxes) - endif - call get_param(param_file,mod_name,"USE_WAVES",Use_Waves,& + call get_param(param_file,mod_name, "USE_WAVES", Use_Waves, & "If true, enables surface wave modules.",default=.false.) 
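! Editor's note: a minimal, self-contained sketch of the pattern used just above, where a
! feature is considered active only when its optional control-structure pointer comes back
! associated (use_ice_shelf = associated(ice_shelf_CSp)).  The names below (feature_CS,
! init_model, demo) are illustrative stand-ins, not MOM6 or FMS APIs, and this block is not
! part of the patch.
module feature_demo
  implicit none
  type :: feature_CS              ! stand-in for an ice-shelf control structure
    integer :: placeholder = 0
  end type feature_CS
contains
  subroutine init_model(enable_feature, feature_CSp)
    logical, intent(in) :: enable_feature
    type(feature_CS), pointer, intent(out) :: feature_CSp
    feature_CSp => null()
    if (enable_feature) allocate(feature_CSp)   ! associated only when the feature is requested
  end subroutine init_model
end module feature_demo

program demo
  use feature_demo
  implicit none
  type(feature_CS), pointer :: feature_CSp => null()
  logical :: use_feature
  call init_model(.true., feature_CSp)
  use_feature = associated(feature_CSp)         ! mirrors use_ice_shelf = associated(ice_shelf_CSp)
  print *, 'feature active: ', use_feature
end program demo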
if (use_waves) then call MOM_wave_interface_init(Time, grid, GV, US, param_file, Waves_CSp, diag) @@ -435,15 +435,14 @@ program MOM_main call diag_mediator_close_registration(diag) ! Write out a time stamp file. - if (calendar_type /= NO_CALENDAR) then - call open_file(unit, 'time_stamp.out', form=ASCII_FILE, action=APPEND_FILE, & - threading=SINGLE_FILE) + if (is_root_pe() .and. (calendar_type /= NO_CALENDAR)) then + call open_ASCII_file(unit, 'time_stamp.out', action=APPEND_FILE) call get_date(Time, date(1), date(2), date(3), date(4), date(5), date(6)) month = month_name(date(2)) - if (is_root_pe()) write(unit,'(6i4,2x,a3)') date, month(1:3) + write(unit,'(6i4,2x,a3)') date, month(1:3) call get_date(Time_end, date(1), date(2), date(3), date(4), date(5), date(6)) month = month_name(date(2)) - if (is_root_pe()) write(unit,'(6i4,2x,a3)') date, month(1:3) + write(unit,'(6i4,2x,a3)') date, month(1:3) call close_file(unit) endif @@ -482,7 +481,7 @@ program MOM_main if (use_ice_shelf) then call shelf_calc_flux(sfc_state, fluxes, Time, dt_forcing, ice_shelf_CSp) - call add_shelf_forces(grid, US, Ice_shelf_CSp, forces) + call add_shelf_forces(grid, US, Ice_shelf_CSp, forces, external_call=.true.) endif fluxes%fluxes_used = .false. fluxes%dt_buoy_accum = US%s_to_T*dt_forcing @@ -621,19 +620,19 @@ program MOM_main if (use_ice_shelf) call ice_shelf_save_restart(ice_shelf_CSp, Time, & dirs%restart_output_dir) ! Write ocean solo restart file. - call open_file(unit, trim(dirs%restart_output_dir)//'ocean_solo.res', nohdrs=.true.) - if (is_root_pe())then - write(unit, '(i6,8x,a)') calendar_type, & - '(Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)' - - call get_date(Start_time, yr, mon, day, hr, mins, sec) - write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & - 'Model start time: year, month, day, hour, minute, second' - call get_date(Time, yr, mon, day, hr, mins, sec) - write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & - 'Current model time: year, month, day, hour, minute, second' + if (is_root_pe()) then + call open_ASCII_file(unit, trim(dirs%restart_output_dir)//'ocean_solo.res') + write(unit, '(i6,8x,a)') calendar_type, & + '(Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)' + + call get_date(Start_time, yr, mon, day, hr, mins, sec) + write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & + 'Model start time: year, month, day, hour, minute, second' + call get_date(Time, yr, mon, day, hr, mins, sec) + write(unit, '(6i6,8x,a)') yr, mon, day, hr, mins, sec, & + 'Current model time: year, month, day, hour, minute, second' + call close_file(unit) endif - call close_file(unit) endif if (is_root_pe()) then @@ -651,6 +650,7 @@ program MOM_main endif call callTree_waypoint("End MOM_main") + if (use_ice_shelf) call ice_shelf_end(ice_shelf_CSp) call diag_mediator_end(Time, diag, end_diag_manager=.true.) if (cpu_steps > 0) call write_cputime(Time, ns-1, write_CPU_CSp, call_end=.true.) 
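! Editor's note: a small stand-alone sketch of the three-record ocean_solo.res layout that the
! root-PE block above writes (an integer calendar code followed by two 6-integer date records,
! using the same edit descriptors).  The output file name and the sample dates are made up for
! illustration; this block is not part of the patch.
program write_solo_res_demo
  implicit none
  integer :: unit
  integer :: calendar_type
  integer :: start_date(6), now_date(6)
  calendar_type = 2                                ! e.g. julian=2 in the key written below
  start_date = (/ 1900, 1, 1, 0, 0, 0 /)
  now_date   = (/ 1900, 2, 1, 0, 0, 0 /)
  open(newunit=unit, file='ocean_solo.res.demo', action='write', status='replace')
  write(unit, '(i6,8x,a)') calendar_type, &
      '(Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)'
  write(unit, '(6i6,8x,a)') start_date, 'Model start time:   year, month, day, hour, minute, second'
  write(unit, '(6i6,8x,a)') now_date, 'Current model time: year, month, day, hour, minute, second'
  close(unit)
end program write_solo_res_demo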
call cpu_clock_end(termClock) @@ -658,6 +658,5 @@ program MOM_main call io_infra_end ; call MOM_infra_end call MOM_end(MOM_CSp) - if (use_ice_shelf) call ice_shelf_end(ice_shelf_CSp) end program MOM_main diff --git a/config_src/solo_driver/MOM_surface_forcing.F90 b/config_src/drivers/solo_driver/MOM_surface_forcing.F90 similarity index 93% rename from config_src/solo_driver/MOM_surface_forcing.F90 rename to config_src/drivers/solo_driver/MOM_surface_forcing.F90 index 3d8b398516..85c363b897 100644 --- a/config_src/solo_driver/MOM_surface_forcing.F90 +++ b/config_src/drivers/solo_driver/MOM_surface_forcing.F90 @@ -1,5 +1,5 @@ !> Functions that calculate the surface wind stresses and fluxes of buoyancy -!! or temperature/salinity andfresh water, in ocean-only (solo) mode. +!! or temperature/salinity and fresh water, in ocean-only (solo) mode. !! !! These functions are called every time step, even if the wind stresses !! or buoyancy fluxes are constant in time - in that case these routines @@ -12,6 +12,7 @@ module MOM_surface_forcing use MOM_constants, only : hlv, hlf use MOM_cpu_clock, only : cpu_clock_id, cpu_clock_begin, cpu_clock_end use MOM_cpu_clock, only : CLOCK_MODULE +use MOM_data_override, only : data_override_init, data_override use MOM_diag_mediator, only : post_data, query_averaging_enabled use MOM_diag_mediator, only : diag_ctrl, safe_alloc_ptr use MOM_domains, only : pass_var, pass_vector, AGRID, To_South, To_West, To_All @@ -54,7 +55,6 @@ module MOM_surface_forcing use BFB_surface_forcing, only : BFB_surface_forcing_init, BFB_surface_forcing_CS use dumbbell_surface_forcing, only : dumbbell_surface_forcing_init, dumbbell_surface_forcing_CS use dumbbell_surface_forcing, only : dumbbell_buoyancy_forcing -use data_override_mod, only : data_override_init, data_override implicit none ; private @@ -151,7 +151,7 @@ module MOM_surface_forcing character(len=200) :: runoff_file = '' !< The file from which the runoff is read character(len=200) :: longwaveup_file = '' !< The file from which the upward longwave heat flux is read - character(len=200) :: shortwaveup_file = '' !< The file from which the upward shorwave heat flux is read + character(len=200) :: shortwaveup_file = '' !< The file from which the upward shortwave heat flux is read character(len=200) :: SSTrestore_file = '' !< The file from which to read the sea surface !! 
temperature to restore toward @@ -161,7 +161,7 @@ module MOM_surface_forcing character(len=80) :: stress_x_var = '' !< X-windstress variable name in the input file character(len=80) :: stress_y_var = '' !< Y-windstress variable name in the input file character(len=80) :: ustar_var = '' !< ustar variable name in the input file - character(len=80) :: LW_var = '' !< lonngwave heat flux variable name in the input file + character(len=80) :: LW_var = '' !< longwave heat flux variable name in the input file character(len=80) :: SW_var = '' !< shortwave heat flux variable name in the input file character(len=80) :: latent_var = '' !< latent heat flux variable name in the input file character(len=80) :: sens_var = '' !< sensible heat flux variable name in the input file @@ -170,7 +170,7 @@ module MOM_surface_forcing character(len=80) :: snow_var = '' !< snowfall variable name in the input file character(len=80) :: lrunoff_var = '' !< liquid runoff variable name in the input file character(len=80) :: frunoff_var = '' !< frozen runoff variable name in the input file - character(len=80) :: SST_restore_var = '' !< target sea surface temeperature variable name in the input file + character(len=80) :: SST_restore_var = '' !< target sea surface temperature variable name in the input file character(len=80) :: SSS_restore_var = '' !< target sea surface salinity variable name in the input file ! These variables give the number of time levels in the various forcing files. @@ -228,7 +228,7 @@ subroutine set_forcing(sfc_state, forces, fluxes, day_start, day_interval, G, US type(time_type), intent(in) :: day_interval !< Length of time over which these fluxes applied type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: dt ! length of time over which fluxes applied [s] @@ -243,7 +243,7 @@ subroutine set_forcing(sfc_state, forces, fluxes, day_start, day_interval, G, US dt = time_type_to_real(day_interval) if (CS%first_call_set_forcing) then - ! Allocate memory for the mechanical and thermodyanmic forcing fields. + ! Allocate memory for the mechanical and thermodynamic forcing fields. call allocate_mech_forcing(G, forces, stress=.true., ustar=.true., press=.true.) call allocate_forcing_type(G, fluxes, ustar=.true., fix_accum_bug=CS%fix_ustar_gustless_bug) @@ -376,7 +376,7 @@ subroutine wind_forcing_const(sfc_state, forces, tau_x0, tau_y0, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: Pa_conversion ! 
A unit conversion factor from Pa to the internal units [R Z L T-2 Pa-1 ~> 1] @@ -421,7 +421,7 @@ subroutine wind_forcing_2gyre(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: PI @@ -455,7 +455,7 @@ subroutine wind_forcing_1gyre(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: PI @@ -488,7 +488,7 @@ subroutine wind_forcing_gyres(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: PI, y, I_rho @@ -541,7 +541,7 @@ subroutine Neverworld_wind_forcing(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< Time used for determining the fluxes. type(ocean_grid_type), intent(inout) :: G !< Grid structure. type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables integer :: i, j, is, ie, js, je, Isq, Ieq, Jsq, Jeq @@ -606,7 +606,7 @@ subroutine scurve_wind_forcing(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< Time used for determining the fluxes. type(ocean_grid_type), intent(inout) :: G !< Grid structure. type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables integer :: i, j, kseg @@ -671,16 +671,16 @@ subroutine wind_forcing_from_file(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables character(len=200) :: filename ! The name of the input file. - real :: temp_x(SZI_(G),SZJ_(G)) ! Pseudo-zonal and psuedo-meridional + real :: temp_x(SZI_(G),SZJ_(G)) ! 
Pseudo-zonal and pseudo-meridional real :: temp_y(SZI_(G),SZJ_(G)) ! wind stresses at h-points [R L Z T-1 ~> Pa]. real :: Pa_conversion ! A unit conversion factor from Pa to the internal wind stress ! units [R Z L T-2 Pa-1 ~> 1] integer :: time_lev_daily ! The time levels to read for fields with - integer :: time_lev_monthly ! daily and montly cycles. + integer :: time_lev_monthly ! daily and monthly cycles. integer :: time_lev ! The time level that is used for a field. integer :: days, seconds integer :: i, j, is, ie, js, je, Isq, Ieq, Jsq, Jeq @@ -787,13 +787,13 @@ subroutine wind_forcing_from_file(sfc_state, forces, day, G, US, CS) call pass_vector(forces%taux, forces%tauy, G%Domain, To_All) if (.not.read_Ustar) then if (CS%read_gust_2d) then - do j=js, je ; do i=is, ie + do j=js,je ; do i=is,ie forces%ustar(i,j) = sqrt((CS%gust(i,j) + & sqrt(0.5*((forces%tauy(i,j-1)**2 + forces%tauy(i,j)**2) + & (forces%taux(i-1,j)**2 + forces%taux(i,j)**2))) ) * US%L_to_Z / CS%Rho0 ) enddo ; enddo else - do j=js, je ; do i=is, ie + do j=js,je ; do i=is,ie forces%ustar(i,j) = sqrt(US%L_to_Z * ( (CS%gust_const/CS%Rho0) + & sqrt(0.5*((forces%tauy(i,j-1)**2 + forces%tauy(i,j)**2) + & (forces%taux(i-1,j)**2 + forces%taux(i,j)**2)))/CS%Rho0)) @@ -826,68 +826,58 @@ subroutine wind_forcing_by_data_override(sfc_state, forces, day, G, US, CS) type(time_type), intent(in) :: day !< The time of the fluxes type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables - real :: temp_x(SZI_(G),SZJ_(G)) ! Pseudo-zonal and psuedo-meridional - real :: temp_y(SZI_(G),SZJ_(G)) ! wind stresses at h-points [Pa]. - real :: temp_ustar(SZI_(G),SZJ_(G)) ! ustar [m s-1] (not rescaled). + real :: temp_x(SZI_(G),SZJ_(G)) ! Pseudo-zonal wind stresses at h-points [R Z L T-2 ~> Pa]. + real :: temp_y(SZI_(G),SZJ_(G)) ! Pseudo-meridional wind stresses at h-points [R Z L T-2 ~> Pa]. real :: Pa_conversion ! A unit conversion factor from Pa to the internal units [R Z L T-2 Pa-1 ~> 1] - integer :: i, j, is_in, ie_in, js_in, je_in - logical :: read_uStar + integer :: i, j call callTree_enter("wind_forcing_by_data_override, MOM_surface_forcing.F90") if (.not.CS%dataOverrideIsInitialized) then call allocate_mech_forcing(G, forces, stress=.true., ustar=.true., press=.true.) - call data_override_init(Ocean_domain_in=G%Domain%mpp_domain) + call data_override_init(G%Domain) CS%dataOverrideIsInitialized = .True. endif - is_in = G%isc - G%isd + 1 ; ie_in = G%iec - G%isd + 1 - js_in = G%jsc - G%jsd + 1 ; je_in = G%jec - G%jsd + 1 Pa_conversion = US%kg_m3_to_R*US%m_s_to_L_T**2*US%L_to_Z temp_x(:,:) = 0.0 ; temp_y(:,:) = 0.0 - call data_override('OCN', 'taux', temp_x, day, is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - call data_override('OCN', 'tauy', temp_y, day, is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) + ! CS%wind_scale is ignored here because it is not set in this mode. + call data_override(G%Domain, 'taux', temp_x, day, scale=Pa_conversion) + call data_override(G%Domain, 'tauy', temp_y, day, scale=Pa_conversion) call pass_vector(temp_x, temp_y, G%Domain, To_All, AGRID) - ! Ignore CS%wind_scale when using data_override ?????
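! Editor's note: the loops that follow average the A-grid (h-point) stresses returned by
! data_override onto the C-grid velocity points.  Below is a minimal stand-alone sketch of
! that averaging on plain arrays (array names, sizes, and the lack of halo handling are
! simplifications for illustration; this is not MOM6 code and not part of the patch).
program stagger_demo
  implicit none
  integer, parameter :: ni = 4, nj = 3
  real :: temp_x(ni,nj), temp_y(ni,nj)   ! stresses at cell centers (h-points)
  real :: taux(ni,nj), tauy(ni,nj)       ! stresses at east and north cell faces
  integer :: i, j
  do j = 1, nj ; do i = 1, ni
    temp_x(i,j) = real(i) ; temp_y(i,j) = real(j)    ! simple smooth test fields
  enddo ; enddo
  taux(:,:) = 0.0 ; tauy(:,:) = 0.0
  do j = 1, nj ; do i = 1, ni-1
    taux(i,j) = 0.5 * (temp_x(i,j) + temp_x(i+1,j))  ! average the two centers sharing an east face
  enddo ; enddo
  do j = 1, nj-1 ; do i = 1, ni
    tauy(i,j) = 0.5 * (temp_y(i,j) + temp_y(i,j+1))  ! average the two centers sharing a north face
  enddo ; enddo
  print *, 'taux(1,1) =', taux(1,1), ' tauy(1,1) =', tauy(1,1)
end program stagger_demo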
do j=G%jsc,G%jec ; do I=G%isc-1,G%IecB - forces%taux(I,j) = Pa_conversion * 0.5 * (temp_x(i,j) + temp_x(i+1,j)) + forces%taux(I,j) = 0.5 * (temp_x(i,j) + temp_x(i+1,j)) enddo ; enddo do J=G%jsc-1,G%JecB ; do i=G%isc,G%iec - forces%tauy(i,J) = Pa_conversion * 0.5 * (temp_y(i,j) + temp_y(i,j+1)) + forces%tauy(i,J) = 0.5 * (temp_y(i,j) + temp_y(i,j+1)) enddo ; enddo - read_Ustar = (len_trim(CS%ustar_var) > 0) ! Need better control higher up ???? - if (read_Ustar) then - do j=G%jsc,G%jec ; do i=G%isc,G%iec ; temp_ustar(i,j) = US%Z_to_m*US%s_to_T*forces%ustar(i,j) ; enddo ; enddo - call data_override('OCN', 'ustar', temp_ustar, day, is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - do j=G%jsc,G%jec ; do i=G%isc,G%iec ; forces%ustar(i,j) = US%m_to_Z*US%T_to_s*temp_ustar(i,j) ; enddo ; enddo + if (CS%read_gust_2d) then + call data_override(G%Domain, 'gust', CS%gust, day, scale=Pa_conversion) + do j=G%jsc,G%jec ; do i=G%isc,G%iec + forces%ustar(i,j) = sqrt((sqrt(temp_x(i,j)**2 + temp_y(i,j)**2) + & + CS%gust(i,j)) * US%L_to_Z / CS%Rho0) + enddo ; enddo else - if (CS%read_gust_2d) then - call data_override('OCN', 'gust', CS%gust, day, is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - do j=G%jsc,G%jec ; do i=G%isc,G%iec - forces%ustar(i,j) = sqrt((Pa_conversion * sqrt(temp_x(i,j)*temp_x(i,j) + & - temp_y(i,j)*temp_y(i,j)) + CS%gust(i,j)) * US%L_to_Z / CS%Rho0) - enddo ; enddo - else - do j=G%jsc,G%jec ; do i=G%isc,G%iec - forces%ustar(i,j) = sqrt(US%L_to_Z * (Pa_conversion*sqrt(temp_x(i,j)*temp_x(i,j) + & - temp_y(i,j)*temp_y(i,j))/CS%Rho0 + CS%gust_const/CS%Rho0 )) - enddo ; enddo - endif + do j=G%jsc,G%jec ; do i=G%isc,G%iec + forces%ustar(i,j) = sqrt(US%L_to_Z * (sqrt(temp_x(i,j)**2 + temp_y(i,j)**2)/CS%Rho0 + & + CS%gust_const/CS%Rho0)) + enddo ; enddo endif + ! Give the data override the option to modify the newly calculated forces%ustar. + call data_override(G%Domain, 'ustar', forces%ustar, day, scale=US%m_to_Z*US%T_to_s) call pass_vector(forces%taux, forces%tauy, G%Domain, To_All) -! call pass_var(forces%ustar, G%Domain, To_All) Not needed ????? call callTree_leave("wind_forcing_by_data_override") end subroutine wind_forcing_by_data_override -!> Specifies zero surface bouyancy fluxes from input files. +!> Specifies zero surface buoyancy fluxes from input files. subroutine buoyancy_forcing_from_files(sfc_state, fluxes, day, dt, G, US, CS) type(surface), intent(inout) :: sfc_state !< A structure containing fields that !! describe the surface state of the ocean. @@ -897,7 +887,7 @@ subroutine buoyancy_forcing_from_files(sfc_state, fluxes, day, dt, G, US, CS) !! the fluxes apply [s] type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real, dimension(SZI_(G),SZJ_(G)) :: & @@ -1165,7 +1155,7 @@ subroutine buoyancy_forcing_from_files(sfc_state, fluxes, day, dt, G, US, CS) call callTree_leave("buoyancy_forcing_from_files") end subroutine buoyancy_forcing_from_files -!> Specifies zero surface bouyancy fluxes from data over-ride. +!> Specifies zero surface buoyancy fluxes from data over-ride. subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US, CS) type(surface), intent(inout) :: sfc_state !< A structure containing fields that !! 
describe the surface state of the ocean. @@ -1175,7 +1165,7 @@ subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US !! the fluxes apply [s] type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real, dimension(SZI_(G),SZJ_(G)) :: & @@ -1190,14 +1180,7 @@ subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US real :: kg_m2_s_conversion ! A combination of unit conversion factors for rescaling ! mass fluxes [R Z s m2 kg-1 T-1 ~> 1]. real :: rhoXcp ! The mean density times the heat capacity [Q R degC-1 ~> J m-3 degC-1]. - - integer :: time_lev_daily ! The time levels to read for fields with - integer :: time_lev_monthly ! daily and montly cycles. - integer :: itime_lev ! The time level that is used for a field. - - integer :: days, seconds integer :: i, j, is, ie, js, je, isd, ied, jsd, jed - integer :: is_in, ie_in, js_in, je_in call callTree_enter("buoyancy_forcing_from_data_override, MOM_surface_forcing.F90") @@ -1208,75 +1191,32 @@ subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US if (CS%use_temperature) rhoXcp = CS%Rho0 * fluxes%C_p if (.not.CS%dataOverrideIsInitialized) then - call data_override_init(Ocean_domain_in=G%Domain%mpp_domain) + call data_override_init(G%Domain) CS%dataOverrideIsInitialized = .True. endif - is_in = G%isc - G%isd + 1 - ie_in = G%iec - G%isd + 1 - js_in = G%jsc - G%jsd + 1 - je_in = G%jec - G%jsd + 1 + call data_override(G%Domain, 'lw', fluxes%lw, day, scale=US%W_m2_to_QRZ_T) + call data_override(G%Domain, 'sw', fluxes%sw, day, scale=US%W_m2_to_QRZ_T) - call data_override('OCN', 'lw', fluxes%lw(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=US%W_m2_to_QRZ_T - if (US%QRZ_T_to_W_m2 /= 1.0) then ; do j=js,je ; do i=is,ie - fluxes%lw(i,j) = fluxes%lw(i,j) * US%W_m2_to_QRZ_T - enddo ; enddo ; endif - call data_override('OCN', 'evap', fluxes%evap(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) + ! The normal MOM6 sign conventions are that fluxes%evap and fluxes%sens are positive into the + ! ocean but evap and sens are normally positive quantities in the files. + call data_override(G%Domain, 'evap', fluxes%evap, day, scale=-US%kg_m2s_to_RZ_T) + call data_override(G%Domain, 'sens', fluxes%sens, day, scale=-US%W_m2_to_QRZ_T) - ! note the sign convention do j=js,je ; do i=is,ie - ! The normal convention is that fluxes%evap positive into the ocean - ! but evap is normally a positive quantity in the files - ! This conversion is dangerous because it is not clear whether the data files have been read! - fluxes%evap(i,j) = -kg_m2_s_conversion*fluxes%evap(i,j) fluxes%latent(i,j) = CS%latent_heat_vapor*fluxes%evap(i,j) fluxes%latent_evap_diag(i,j) = fluxes%latent(i,j) enddo ; enddo - call data_override('OCN', 'sens', fluxes%sens(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - - ! note the sign convention - do j=js,je ; do i=is,ie - fluxes%sens(i,j) = -US%W_m2_to_QRZ_T * fluxes%sens(i,j) ! Normal convention is positive into the ocean - ! 
but sensible is normally a positive quantity in the files - enddo ; enddo - - call data_override('OCN', 'sw', fluxes%sw(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=US%W_m2_to_QRZ_T - if (US%QRZ_T_to_W_m2 /= 1.0) then ; do j=js,je ; do i=is,ie - fluxes%sw(i,j) = fluxes%sw(i,j) * US%W_m2_to_QRZ_T - enddo ; enddo ; endif - - call data_override('OCN', 'snow', fluxes%fprec(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=kg_m2_s_conversion - - call data_override('OCN', 'rain', fluxes%lprec(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=kg_m2_s_conversion - - call data_override('OCN', 'runoff', fluxes%lrunoff(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=kg_m2_s_conversion - - call data_override('OCN', 'calving', fluxes%frunoff(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) ! scale=kg_m2_s_conversion - - if (kg_m2_s_conversion /= 1.0) then ; do j=js,je ; do i=is,ie - fluxes%lprec(i,j) = fluxes%lprec(i,j) * kg_m2_s_conversion - fluxes%fprec(i,j) = fluxes%fprec(i,j) * kg_m2_s_conversion - fluxes%lrunoff(i,j) = fluxes%lrunoff(i,j) * kg_m2_s_conversion - fluxes%frunoff(i,j) = fluxes%frunoff(i,j) * kg_m2_s_conversion - enddo ; enddo ; endif + call data_override(G%Domain, 'snow', fluxes%fprec, day, scale=kg_m2_s_conversion) + call data_override(G%Domain, 'rain', fluxes%lprec, day, scale=kg_m2_s_conversion) + call data_override(G%Domain, 'runoff', fluxes%lrunoff, day, scale=kg_m2_s_conversion) + call data_override(G%Domain, 'calving', fluxes%frunoff, day, scale=kg_m2_s_conversion) ! Read the SST and SSS fields for damping. if (CS%restorebuoy) then !#CTRL# .or. associated(CS%ctrl_forcing_CSp)) then - call data_override('OCN', 'SST_restore', CS%T_restore(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - - call data_override('OCN', 'SSS_restore', CS%S_restore(:,:), day, & - is_in=is_in, ie_in=ie_in, js_in=js_in, je_in=je_in) - + call data_override(G%Domain, 'SST_restore', CS%T_restore, day) + call data_override(G%Domain, 'SSS_restore', CS%S_restore, day) endif ! restoring boundary fluxes @@ -1334,7 +1274,6 @@ subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US fluxes%latent_frunoff_diag(i,j) = -fluxes%frunoff(i,j)*CS%latent_heat_fusion enddo ; enddo - !#CTRL# if (associated(CS%ctrl_forcing_CSp)) then !#CTRL# do j=js,je ; do i=is,ie !#CTRL# SST_anom(i,j) = sfc_state%SST(i,j) - CS%T_Restore(i,j) @@ -1348,7 +1287,7 @@ subroutine buoyancy_forcing_from_data_override(sfc_state, fluxes, day, dt, G, US call callTree_leave("buoyancy_forcing_from_data_override") end subroutine buoyancy_forcing_from_data_override -!> This subroutine specifies zero surface bouyancy fluxes +!> This subroutine specifies zero surface buoyancy fluxes subroutine buoyancy_forcing_zero(sfc_state, fluxes, day, dt, G, CS) type(surface), intent(inout) :: sfc_state !< A structure containing fields that !! describe the surface state of the ocean. @@ -1357,7 +1296,7 @@ subroutine buoyancy_forcing_zero(sfc_state, fluxes, day, dt, G, CS) real, intent(in) :: dt !< The amount of time over which !! the fluxes apply [s] type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! 
Local variables integer :: i, j, is, ie, js, je @@ -1401,7 +1340,7 @@ subroutine buoyancy_forcing_const(sfc_state, fluxes, day, dt, G, US, CS) !! the fluxes apply [s] type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables integer :: i, j, is, ie, js, je @@ -1444,7 +1383,7 @@ subroutine buoyancy_forcing_linear(sfc_state, fluxes, day, dt, G, US, CS) !! the fluxes apply [s] type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call ! Local variables real :: y, T_restore, S_restore @@ -1518,7 +1457,7 @@ end subroutine buoyancy_forcing_linear !> Save a restart file for the forcing fields subroutine forcing_save_restart(CS, G, Time, directory, time_stamped, & filename_suffix) - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call type(ocean_grid_type), intent(inout) :: G !< The ocean's grid structure type(time_type), intent(in) :: Time !< model time at this call; needed for mpp_write calls @@ -1526,7 +1465,7 @@ subroutine forcing_save_restart(CS, G, Time, directory, time_stamped, & logical, optional, intent(in) :: time_stamped !< If true, the restart file names !! include a unique time stamp; the default is false. character(len=*), optional, intent(in) :: filename_suffix !< optional suffix (e.g., a time-stamp) - !! to append to the restart fname + !! to append to the restart file name if (.not.associated(CS)) return if (.not.associated(CS%restart_CSp)) return @@ -1542,7 +1481,7 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, tracer_flow_C type(unit_scale_type), intent(in) :: US !< A dimensional unit scaling type type(param_file_type), intent(in) :: param_file !< A structure to parse for run-time parameters type(diag_ctrl), target, intent(inout) :: diag !< structure used to regulate diagnostic output - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call type(tracer_flow_control_CS), pointer :: tracer_flow_CSp !< Forcing for tracers? @@ -1593,9 +1532,9 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, tracer_flow_C "initialization of the model.", default=.true.) call get_param(param_file, mdl, "BUOY_CONFIG", CS%buoy_config, & - "The character string that indicates how buoyancy forcing "//& - "is specified. Valid options include (file), (zero), "//& - "(linear), (USER), (BFB) and (NONE).", default="zero") + "The character string that indicates how buoyancy forcing is specified. 
Valid "//& + "options include (file), (data_override), (zero), (const), (linear), (MESO), "//& + "(SCM_CVmix_tests), (BFB), (dumbbell), (USER) and (NONE).", default="zero") if (trim(CS%buoy_config) == "file") then call get_param(param_file, mdl, "ARCHAIC_OMIP_FORCING_FILE", CS%archaic_OMIP_file, & "If true, use the forcing variable decomposition from "//& @@ -1735,9 +1674,10 @@ subroutine surface_forcing_init(Time, G, US, param_file, diag, CS, tracer_flow_C units='W/m2', scale=US%W_m2_to_QRZ_T, fail_if_missing=.true.) endif call get_param(param_file, mdl, "WIND_CONFIG", CS%wind_config, & - "The character string that indicates how wind forcing "//& - "is specified. Valid options include (file), (2gyre), "//& - "(1gyre), (gyres), (zero), and (USER).", default="zero") + "The character string that indicates how wind forcing is specified. Valid "//& + "options include (file), (data_override), (2gyre), (1gyre), (gyres), (zero), "//& + "(const), (Neverworld), (scurves), (ideal_hurr), (SCM_ideal_hurr), "//& + "(SCM_CVmix_tests) and (USER).", default="zero") if (trim(CS%wind_config) == "file") then call get_param(param_file, mdl, "WIND_FILE", CS%wind_file, & "The file in which the wind stresses are found in "//& @@ -1964,7 +1904,7 @@ end subroutine surface_forcing_init !> Deallocate memory associated with the surface forcing module subroutine surface_forcing_end(CS, fluxes) - type(surface_forcing_CS), pointer :: CS !< pointer to control struct returned by + type(surface_forcing_CS), pointer :: CS !< pointer to control structure returned by !! a previous surface_forcing_init call type(forcing), optional, intent(inout) :: fluxes !< A structure containing thermodynamic forcing fields ! Arguments: CS - A pointer to the control structure returned by a previous diff --git a/config_src/solo_driver/atmos_ocean_fluxes.F90 b/config_src/drivers/solo_driver/atmos_ocean_fluxes.F90 similarity index 100% rename from config_src/solo_driver/atmos_ocean_fluxes.F90 rename to config_src/drivers/solo_driver/atmos_ocean_fluxes.F90 diff --git a/config_src/solo_driver/user_surface_forcing.F90 b/config_src/drivers/solo_driver/user_surface_forcing.F90 similarity index 99% rename from config_src/solo_driver/user_surface_forcing.F90 rename to config_src/drivers/solo_driver/user_surface_forcing.F90 index f5372e07d2..940bcd04b4 100644 --- a/config_src/solo_driver/user_surface_forcing.F90 +++ b/config_src/drivers/solo_driver/user_surface_forcing.F90 @@ -11,7 +11,6 @@ module user_surface_forcing use MOM_forcing_type, only : forcing, mech_forcing use MOM_forcing_type, only : allocate_forcing_type, allocate_mech_forcing use MOM_grid, only : ocean_grid_type -use MOM_io, only : file_exists, read_data use MOM_time_manager, only : time_type, operator(+), operator(/) use MOM_tracer_flow_control, only : call_tracer_set_forcing use MOM_tracer_flow_control, only : tracer_flow_control_CS @@ -89,7 +88,7 @@ subroutine USER_wind_forcing(sfc_state, forces, day, G, US, CS) ! is always positive. if (associated(forces%ustar)) then ; do j=js,je ; do i=is,ie ! This expression can be changed if desired, but need not be. 
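! Editor's note: the expression below is the usual friction-velocity relation
! ustar = sqrt((gustiness + |tau|)/rho0), with the stress magnitude formed from the squared
! staggered components and US%L_to_Z carrying the dimensional rescaling.  A tiny worked
! example with the rescaling dropped follows; all numbers are made up and this block is not
! part of the patch.
program ustar_demo
  implicit none
  real :: tau_mag, gust_const, rho0, ustar
  tau_mag    = 0.1      ! wind stress magnitude [Pa]
  gust_const = 0.02     ! background gustiness [Pa]
  rho0       = 1035.0   ! reference seawater density [kg m-3]
  ustar = sqrt((gust_const + tau_mag) / rho0)
  print '(a,f8.5,a)', 'ustar = ', ustar, ' m s-1'    ! roughly 0.011 m s-1 for these values
end program ustar_demo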
- forces%ustar(i,j) = G%mask2dT(i,j) * sqrt((CS%gust_const + & + forces%ustar(i,j) = G%mask2dT(i,j) * sqrt((CS%gust_const + & sqrt(0.5*(forces%taux(I-1,j)**2 + forces%taux(I,j)**2) + & 0.5*(forces%tauy(i,J-1)**2 + forces%tauy(i,J)**2))) * (US%L_to_Z/CS%Rho0)) enddo ; enddo ; endif diff --git a/config_src/unit_drivers/MOM_sum_driver.F90 b/config_src/drivers/unit_drivers/MOM_sum_driver.F90 similarity index 90% rename from config_src/unit_drivers/MOM_sum_driver.F90 rename to config_src/drivers/unit_drivers/MOM_sum_driver.F90 index 5673b201ee..7e3c6d45b4 100644 --- a/config_src/unit_drivers/MOM_sum_driver.F90 +++ b/config_src/drivers/unit_drivers/MOM_sum_driver.F90 @@ -18,8 +18,6 @@ program MOM_main use MOM_coms, only : EFP_type, operator(+), operator(-), assignment(=), EFP_to_real, real_to_EFP use MOM_cpu_clock, only : cpu_clock_id, cpu_clock_begin, cpu_clock_end use MOM_cpu_clock, only : CLOCK_COMPONENT -! use MOM_diag_mediator, only : diag_mediator_end, diag_mediator_init -! use MOM_diag_mediator, only : diag_mediator_close_registration use MOM_domains, only : MOM_domains_init, MOM_infra_init, MOM_infra_end use MOM_error_handler, only : MOM_error, MOM_mesg, WARNING, FATAL, is_root_pe use MOM_error_handler, only : MOM_set_verbosity @@ -39,11 +37,10 @@ program MOM_main type(param_file_type) :: param_file ! The structure indicating the file(s) ! containing all run-time parameters. - real :: max_depth + real :: max_depth ! The maximum ocean depth [m] integer :: verbosity integer :: num_sums - integer :: n, i, j, is, ie, js, je, nz - integer :: isd, ied, jsd, jed, IsdB, IedB, JsdB, JedB + integer :: n, i, j, is, ie, js, je, isd, ied, jsd, jed integer :: unit, io_status, ierr logical :: unit_in_use @@ -55,8 +52,8 @@ program MOM_main !----------------------------------------------------------------------- character(len=4), parameter :: vers_num = 'v2.0' -! This include declares and sets the variable "version". -#include "version_variable.h" + ! This include declares and sets the variable "version". +# include "version_variable.h" character(len=40) :: mdl = "MOM_main (MOM_sum_driver)" ! This module's name. character(len=200) :: mesg @@ -85,9 +82,8 @@ program MOM_main ! call diag_mediator_init(param_file) call MOM_grid_init(grid, param_file) - is = grid%isc ; ie = grid%iec ; js = grid%jsc ; je = grid%jec ; nz = grid%ke + is = grid%isc ; ie = grid%iec ; js = grid%jsc ; je = grid%jec isd = grid%isd ; ied = grid%ied ; jsd = grid%jsd ; jed = grid%jed - IsdB = grid%IsdB ; IedB = grid%IedB ; JsdB = grid%JsdB ; JedB = grid%JedB ! Read all relevant parameters and write them to the model log. call log_version(param_file, "MOM", version, "") @@ -165,27 +161,25 @@ program MOM_main contains +!> This subroutine sets up the benchmark test case topography for debugging subroutine benchmark_init_topog_local(D, G, param_file, max_depth) type(ocean_grid_type), intent(in) :: G !< The ocean's grid structure real, dimension(SZI_(G),SZJ_(G)), intent(out) :: D !< The ocean bottom depth in m type(param_file_type), intent(in) :: param_file !< A structure to parse for run-time parameters - real, intent(in) :: max_depth !< The maximum ocean depth in m + real, intent(in) :: max_depth !< The maximum ocean depth [m] -! This subroutine sets up the benchmark test case topography real :: min_depth ! The minimum ocean depth in m. real :: PI ! 3.1415926... calculated as 4*atan(1) real :: D0 ! A constant to make the maximum ! ! basin depth MAXIMUM_DEPTH. ! real :: x, y -! This include declares and sets the variable "version". 
-#include "version_variable.h" - character(len=40) :: mdl = "benchmark_initialize_topography" ! This subroutine's name. + ! This include declares and sets the variable "version". +# include "version_variable.h" + character(len=40) :: mdl = "benchmark_init_topog_local" ! This subroutine's name. integer :: i, j, is, ie, js, je, isd, ied, jsd, jed is = G%isc ; ie = G%iec ; js = G%jsc ; je = G%jec isd = G%isd ; ied = G%ied ; jsd = G%jsd ; jed = G%jed - call MOM_mesg(" benchmark_initialization.F90, benchmark_initialize_topography: setting topography", 5) - call log_version(param_file, mdl, version) call get_param(param_file, mdl, "MINIMUM_DEPTH", min_depth, & "The minimum depth of the ocean.", units="m", default=0.0) diff --git a/config_src/external/GFDL_ocean_BGC/FMS_coupler_util.F90 b/config_src/external/GFDL_ocean_BGC/FMS_coupler_util.F90 index f3d63dd061..e50f2ccf0b 100644 --- a/config_src/external/GFDL_ocean_BGC/FMS_coupler_util.F90 +++ b/config_src/external/GFDL_ocean_BGC/FMS_coupler_util.F90 @@ -12,11 +12,15 @@ module FMS_coupler_util subroutine extract_coupler_values(BC_struc, BC_index, BC_element, array_out, ilb, jlb, & is, ie, js, je, conversion) real, dimension(ilb:,jlb:),intent(out) :: array_out !< The array being filled with the input values - integer, intent(in) :: ilb, jlb !< Lower bounds + integer, intent(in) :: ilb !< Lower bounds + integer, intent(in) :: jlb !< Lower bounds type(coupler_2d_bc_type), intent(in) :: BC_struc !< The type from which the data is being extracted integer, intent(in) :: BC_index !< The boundary condition number being extracted integer, intent(in) :: BC_element !< The element of the boundary condition being extracted - integer, optional, intent(in) :: is, ie, js, je !< The i- and j- limits of array_out to be filled + integer, optional, intent(in) :: is !< The i- limits of array_out to be filled + integer, optional, intent(in) :: ie !< The i- limits of array_out to be filled + integer, optional, intent(in) :: js !< The j- limits of array_out to be filled + integer, optional, intent(in) :: je !< The j- limits of array_out to be filled real, optional, intent(in) :: conversion !< A number that every element is multiplied by end subroutine extract_coupler_values @@ -24,11 +28,15 @@ end subroutine extract_coupler_values subroutine set_coupler_values(array_in, BC_struc, BC_index, BC_element, ilb, jlb,& is, ie, js, je, conversion) real, dimension(ilb:,jlb:), intent(in) :: array_in !< The array containing the values to load into the BC - integer, intent(in) :: ilb, jlb !< Lower bounds + integer, intent(in) :: ilb !< Lower bounds + integer, intent(in) :: jlb !< Lower bounds type(coupler_2d_bc_type), intent(inout) :: BC_struc !< The type into which the data is being loaded integer, intent(in) :: BC_index !< The boundary condition number being set integer, intent(in) :: BC_element !< The element of the boundary condition being set - integer, optional, intent(in) :: is, ie, js, je !< The i- and j- limits of array_out to be filled + integer, optional, intent(in) :: is !< The i- limits of array_out to be filled + integer, optional, intent(in) :: ie !< The i- limits of array_out to be filled + integer, optional, intent(in) :: js !< The j- limits of array_out to be filled + integer, optional, intent(in) :: je !< The j- limits of array_out to be filled real, optional, intent(in) :: conversion !< A number that every element is multiplied by end subroutine set_coupler_values diff --git a/config_src/external/GFDL_ocean_BGC/generic_tracer.F90 
b/config_src/external/GFDL_ocean_BGC/generic_tracer.F90 index bfbc846af9..4d2e4183f7 100644 --- a/config_src/external/GFDL_ocean_BGC/generic_tracer.F90 +++ b/config_src/external/GFDL_ocean_BGC/generic_tracer.F90 @@ -33,7 +33,17 @@ end subroutine generic_tracer_register !> Initialize generic tracers subroutine generic_tracer_init(isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau,axes,grid_tmask,grid_kmt,init_time) - integer, intent(in) :: isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau,axes(3) !< Domain boundaries and axes + integer, intent(in) :: isc !< Computation start index in i direction + integer, intent(in) :: iec !< Computation end index in i direction + integer, intent(in) :: jsc !< Computation start index in j direction + integer, intent(in) :: jec !< Computation end index in j direction + integer, intent(in) :: isd !< Data start index in i direction + integer, intent(in) :: ied !< Data end index in i direction + integer, intent(in) :: jsd !< Data start index in j direction + integer, intent(in) :: jed !< Data end index in j direction + integer, intent(in) :: nk !< Number of levels in k direction + integer, intent(in) :: ntau !< Unknown + integer, intent(in) :: axes(3) !< Domain axes? type(time_type), intent(in) :: init_time !< Time real, dimension(:,:,:),target, intent(in) :: grid_tmask !< Mask integer, dimension(:,:) , intent(in) :: grid_kmt !< Number of wet cells in column @@ -61,7 +71,7 @@ subroutine generic_tracer_source(Temp,Salt,rho_dzt,dzt,hblt_depth,ilb,jlb,tau,dt frunoff,grid_ht, current_wave_stress, sosga) real, dimension(ilb:,jlb:,:), intent(in) :: Temp !< Potential temperature [deg C] real, dimension(ilb:,jlb:,:), intent(in) :: Salt !< Salinity [psu] - real, dimension(ilb:,jlb:,:), intent(in) :: rho_dzt + real, dimension(ilb:,jlb:,:), intent(in) :: rho_dzt !< Unknown real, dimension(ilb:,jlb:,:), intent(in) :: dzt !< Ocean layer thickness [m] real, dimension(ilb:,jlb:), intent(in) :: hblt_depth !< Boundary layer depth integer, intent(in) :: ilb !< Lower bounds of x extent of input arrays on data domain @@ -71,14 +81,14 @@ subroutine generic_tracer_source(Temp,Salt,rho_dzt,dzt,hblt_depth,ilb,jlb,tau,dt real, dimension(ilb:,jlb:), intent(in) :: grid_dat !< Unknown type(time_type), intent(in) :: model_time !< Time integer, intent(in) :: nbands !< Unknown - real, dimension(:), intent(in) :: max_wavelength_band + real, dimension(:), intent(in) :: max_wavelength_band !< Unknown real, dimension(:,ilb:,jlb:), intent(in) :: sw_pen_band !< Shortwave penetration real, dimension(:,ilb:,jlb:,:), intent(in) :: opacity_band !< Unknown real, dimension(ilb:,jlb:),optional, intent(in) :: internal_heat !< Unknown real, dimension(ilb:,jlb:),optional, intent(in) :: frunoff !< Unknown real, dimension(ilb:,jlb:),optional, intent(in) :: grid_ht !< Unknown real, dimension(ilb:,jlb:),optional , intent(in) :: current_wave_stress !< Unknown - real, optional , intent(in) :: sosga ! global avg. 
sea surface salinity + real, optional , intent(in) :: sosga !< Global average sea surface salinity end subroutine generic_tracer_source !> Update the tracers from bottom fluxes diff --git a/config_src/external/GFDL_ocean_BGC/generic_tracer_utils.F90 b/config_src/external/GFDL_ocean_BGC/generic_tracer_utils.F90 index 6937ef4710..de513a7f11 100644 --- a/config_src/external/GFDL_ocean_BGC/generic_tracer_utils.F90 +++ b/config_src/external/GFDL_ocean_BGC/generic_tracer_utils.F90 @@ -21,8 +21,10 @@ module g_tracer_utils !> Tracer concentration in river runoff real, allocatable, dimension(:,:) :: trunoff logical :: requires_restart = .true. !< Unknown - !> Tracer source: filename, type, var name, units, record, gridfile - character(len=fm_string_len) :: src_file, src_var_name, src_var_unit, src_var_gridspec + character(len=fm_string_len) :: src_file !< Tracer source filename + character(len=fm_string_len) :: src_var_name !< Tracer source variable name + character(len=fm_string_len) :: src_var_unit !< Tracer source variable units + character(len=fm_string_len) :: src_var_gridspec !< Tracer source grid file name integer :: src_var_record !< Unknown logical :: requires_src_info = .false. !< Unknown real :: src_var_unit_conversion = 1.0 !< This factor depends on the tracer. Ask Jasmin @@ -38,7 +40,8 @@ module g_tracer_utils type g_tracer_common ! type(g_diag_ctrl) :: diag_CS !< Unknown !> Domain extents - integer :: isd,jsd + integer :: isd !< Start index of the data domain in the i-direction + integer :: jsd !< Start index of the data domain in the j-direction end type g_tracer_common !> Unknown dangerous module data! @@ -102,7 +105,17 @@ subroutine g_tracer_set_csdiag(diag_CS) end subroutine g_tracer_set_csdiag subroutine g_tracer_set_common(isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau,axes,grid_tmask,grid_kmt,init_time) - integer, intent(in) :: isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau,axes(3) !< Unknown + integer, intent(in) :: isc !< Computation start index in i direction + integer, intent(in) :: iec !< Computation end index in i direction + integer, intent(in) :: jsc !< Computation start index in j direction + integer, intent(in) :: jec !< Computation end index in j direction + integer, intent(in) :: isd !< Data start index in i direction + integer, intent(in) :: ied !< Data end index in i direction + integer, intent(in) :: jsd !< Data start index in j direction + integer, intent(in) :: jed !< Data end index in j direction + integer, intent(in) :: nk !< Number of levels in k direction + integer, intent(in) :: ntau !< Unknown + integer, intent(in) :: axes(3) !< Domain axes? 
real, dimension(isd:,jsd:,:),intent(in) :: grid_tmask !< Unknown integer,dimension(isd:,jsd:),intent(in) :: grid_kmt !< Unknown type(time_type), intent(in) :: init_time !< Unknown @@ -110,10 +123,19 @@ end subroutine g_tracer_set_common subroutine g_tracer_get_common(isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau,& axes,grid_tmask,grid_mask_coast,grid_kmt,init_time,diag_CS) - integer, intent(out) :: isc,iec,jsc,jec,isd,ied,jsd,jed,nk,ntau !< Unknown - integer,optional, intent(out) :: axes(3) !< Unknown + integer, intent(out) :: isc !< Computation start index in i direction + integer, intent(out) :: iec !< Computation end index in i direction + integer, intent(out) :: jsc !< Computation start index in j direction + integer, intent(out) :: jec !< Computation end index in j direction + integer, intent(out) :: isd !< Data start index in i direction + integer, intent(out) :: ied !< Data end index in i direction + integer, intent(out) :: jsd !< Data start index in j direction + integer, intent(out) :: jed !< Data end index in j direction + integer, intent(out) :: nk !< Number of levels in k direction + integer, intent(out) :: ntau !< Unknown + integer, optional, intent(out) :: axes(3) !< Unknown type(time_type), optional, intent(out) :: init_time !< Unknown - real, optional, dimension(:,:,:),pointer :: grid_tmask !< Unknown + real, optional, dimension(:,:,:), pointer :: grid_tmask !< Unknown integer, optional, dimension(:,:), pointer :: grid_mask_coast !< Unknown integer, optional, dimension(:,:), pointer :: grid_kmt !< Unknown type(g_diag_ctrl), optional, pointer :: diag_CS !< Unknown @@ -123,32 +145,33 @@ end subroutine g_tracer_get_common subroutine g_tracer_get_4D(g_tracer_list,name,member,array_ptr) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - real, dimension(:,:,:,:), pointer :: array_ptr + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + real, dimension(:,:,:,:), pointer :: array_ptr !< Unknown end subroutine g_tracer_get_4D !> Unknown subroutine g_tracer_get_3D(g_tracer_list,name,member,array_ptr) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - real, dimension(:,:,:), pointer :: array_ptr !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + real, dimension(:,:,:), pointer :: array_ptr !< Unknown end subroutine g_tracer_get_3D !> Unknown subroutine g_tracer_get_2D(g_tracer_list,name,member,array_ptr) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - real, dimension(:,:), pointer :: array_ptr !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + real, dimension(:,:), pointer :: array_ptr !< Unknown end subroutine g_tracer_get_2D !> Unknown subroutine g_tracer_get_4D_val(g_tracer_list,name,member,array,isd,jsd) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - integer, intent(in) :: isd,jsd !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown real, dimension(isd:,jsd:,:,:), intent(out):: array !< Unknown end subroutine g_tracer_get_4D_val @@ -156,8 +179,9 @@ end subroutine 
g_tracer_get_4D_val subroutine g_tracer_get_3D_val(g_tracer_list,name,member,array,isd,jsd,ntau,positive) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - integer, intent(in) :: isd,jsd !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown integer, optional, intent(in) :: ntau !< Unknown logical, optional, intent(in) :: positive !< Unknown real, dimension(isd:,jsd:,:), intent(out):: array !< Unknown @@ -169,8 +193,9 @@ end subroutine g_tracer_get_3D_val subroutine g_tracer_get_2D_val(g_tracer_list,name,member,array,isd,jsd) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - integer, intent(in) :: isd,jsd !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown real, dimension(isd:,jsd:), intent(out):: array !< Unknown end subroutine g_tracer_get_2D_val @@ -178,15 +203,15 @@ end subroutine g_tracer_get_2D_val subroutine g_tracer_get_real(g_tracer_list,name,member,value) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - real, intent(out):: value + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + real, intent(out):: value !< Unknown end subroutine g_tracer_get_real !> Unknown subroutine g_tracer_get_string(g_tracer_list,name,member,string) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown character(len=fm_string_len), intent(out) :: string !< Unknown end subroutine g_tracer_get_string @@ -194,8 +219,9 @@ end subroutine g_tracer_get_string subroutine g_tracer_set_2D(g_tracer_list,name,member,array,isd,jsd,weight) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - integer, intent(in) :: isd,jsd !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown real, dimension(isd:,jsd:),intent(in) :: array !< Unknown real, optional ,intent(in) :: weight !< Unknown end subroutine g_tracer_set_2D @@ -204,8 +230,9 @@ end subroutine g_tracer_set_2D subroutine g_tracer_set_3D(g_tracer_list,name,member,array,isd,jsd,ntau) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown - integer, intent(in) :: isd,jsd !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown integer, optional, intent(in) :: ntau !< Unknown real, dimension(isd:,jsd:,:), intent(in) :: array !< Unknown end subroutine g_tracer_set_3D @@ -214,16 +241,17 @@ end subroutine g_tracer_set_3D subroutine g_tracer_set_4D(g_tracer_list,name,member,array,isd,jsd) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown 
- integer, intent(in) :: isd,jsd !< Unknown - real, dimension(isd:,jsd:,:,:), intent(in) :: array !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown + integer, intent(in) :: isd !< Unknown + integer, intent(in) :: jsd !< Unknown + real, dimension(isd:,jsd:,:,:), intent(in) :: array !< Unknown end subroutine g_tracer_set_4D !> Unknown subroutine g_tracer_set_real(g_tracer_list,name,member,value) character(len=*), intent(in) :: name !< Unknown character(len=*), intent(in) :: member !< Unknown - type(g_tracer_type), pointer :: g_tracer_list, g_tracer !< Unknown + type(g_tracer_type), pointer :: g_tracer_list !< Unknown real, intent(in) :: value !< Unknown end subroutine g_tracer_set_real @@ -265,7 +293,7 @@ end subroutine g_tracer_get_next !! Since the surface flux from the atmosphere (%stf) has the units of mol/m^2/sec the resulting !! tracer concentration has units of mol/Kg subroutine g_tracer_vertdiff_G(g_tracer, h_old, ea, eb, dt, kg_m2_to_H, m_to_H, tau, mom) - type(g_tracer_type), pointer :: g_tracer + type(g_tracer_type), pointer :: g_tracer !< Unknown !> Layer thickness before entrainment, in m or kg m-2. real, dimension(g_tracer_com%isd:,g_tracer_com%jsd:,:), intent(in) :: h_old !> The amount of fluid entrained from the layer above, in H. @@ -278,7 +306,7 @@ subroutine g_tracer_vertdiff_G(g_tracer, h_old, ea, eb, dt, kg_m2_to_H, m_to_H, real, intent(in) :: m_to_H !< A conversion factor that translates m into the units !! of h_old (H). integer, intent(in) :: tau !< Unknown - logical, intent(in), optional :: mom + logical, intent(in), optional :: mom !< Unknown end subroutine g_tracer_vertdiff_G end module g_tracer_utils diff --git a/config_src/external/ODA_hooks/ocean_da_types.F90 b/config_src/external/ODA_hooks/ocean_da_types.F90 index bc5af1d782..e71c76a048 100644 --- a/config_src/external/ODA_hooks/ocean_da_types.F90 +++ b/config_src/external/ODA_hooks/ocean_da_types.F90 @@ -7,10 +7,9 @@ module ocean_da_types_mod private - !> Example type for ocean ensemble DA state type, public :: OCEAN_CONTROL_STRUCT - integer :: ensemble_size + integer :: ensemble_size !< ensemble size real, pointer, dimension(:,:,:) :: SSH=>NULL() !NULL() !NULL() !NULL() !< all profiles are stored as linked list. - type(ocean_profile_type), pointer :: prev=>NULL() - type(ocean_profile_type), pointer :: cnext=>NULL() ! current profiles are stored as linked list. - type(ocean_profile_type), pointer :: cprev=>NULL() - integer :: nbr_xi, nbr_yi ! nearest neighbor model gridpoint for the profile - real :: nbr_dist ! distance to nearest neighbor model gridpoint + type(ocean_profile_type), pointer :: prev=>NULL() !< previous + type(ocean_profile_type), pointer :: cnext=>NULL() !< current profiles are stored as linked list. 
+ type(ocean_profile_type), pointer :: cprev=>NULL() !< previous
+ integer :: nbr_xi !< x nearest neighbor model gridpoint for the profile
+ integer :: nbr_yi !< y nearest neighbor model gridpoint for the profile
+ real :: nbr_dist !< distance to nearest neighbor model gridpoint
logical :: compute !< profile is within current compute domain
real, dimension(:,:), pointer :: depth => NULL() !< depth of measurement [m]
real, dimension(:,:), pointer :: data => NULL() !< data by variable type
@@ -54,32 +55,36 @@ module ocean_da_types_mod
real, dimension(:,:,:), pointer :: analysis => NULL() !< ensemble member analysis
type(forward_operator_type), pointer :: obs_def => NULL() !< observation forward operator
type(time_type) :: time !< profile time type
- real :: i_index, j_index !< model longitude and latitude indices respectively
+ real :: i_index !< model longitude index
+ real :: j_index !< model latitude index
real, dimension(:,:), pointer :: k_index !< model depth indices
type(time_type) :: tdiff !< difference between model time and observation time
- character(len=128) :: filename
+ character(len=128) :: filename !< a filename
end type ocean_profile_type
!> Example forward operator type.
type, public :: forward_operator_type
- integer :: num
+ integer :: num !< how many?
integer, dimension(2) :: state_size !< for
integer, dimension(:), pointer :: state_var_index !< for flattened data
integer, dimension(:), pointer :: i_index !< i-dimension index
integer, dimension(:), pointer :: j_index !< j-dimension index
- real, dimension(:), pointer :: coef
+ real, dimension(:), pointer :: coef !< coefficient
end type forward_operator_type
!> Grid type for DA
type, public :: grid_type
- real, pointer, dimension(:,:) :: x=>NULL(), y=>NULL()
- real, pointer, dimension(:,:,:) :: z=>NULL()
- real, pointer, dimension(:,:,:) :: h=>NULL()
- real, pointer, dimension(:,:) :: basin_mask => NULL()
- real, pointer, dimension(:,:,:) :: mask => NULL()
- real, pointer, dimension(:,:) :: bathyT => NULL()
- logical :: tripolar_N
- integer :: ni, nj, nk
+ real, pointer, dimension(:,:) :: x=>NULL() !< x
+ real, pointer, dimension(:,:) :: y=>NULL() !< y
+ real, pointer, dimension(:,:,:) :: z=>NULL() !< z
+ real, pointer, dimension(:,:,:) :: h=>NULL() !< h
+ real, pointer, dimension(:,:) :: basin_mask => NULL() !< basin mask
+ real, pointer, dimension(:,:,:) :: mask => NULL() !< land mask?
+ real, pointer, dimension(:,:) :: bathyT => NULL() !< bathymetry at T points + logical :: tripolar_N !< True for tripolar grids + integer :: ni !< ni + integer :: nj !< nj + integer :: nk !< nk end type grid_type end module ocean_da_types_mod diff --git a/config_src/external/ODA_hooks/write_ocean_obs.F90 b/config_src/external/ODA_hooks/write_ocean_obs.F90 index a2c41b58d6..da4a404d3d 100644 --- a/config_src/external/ODA_hooks/write_ocean_obs.F90 +++ b/config_src/external/ODA_hooks/write_ocean_obs.F90 @@ -15,13 +15,13 @@ module write_ocean_obs_mod contains !> Open a profile file -integer function open_profile_file(name, nvar, grid_lon, grid_lat,thread,fset) +integer function open_profile_file(name, nvar, grid_lon, grid_lat, thread, fset) character(len=*), intent(in) :: name !< File name integer, intent(in), optional :: nvar !< Number of variables real, dimension(:), optional, intent(in) :: grid_lon !< Longitude [degreeE] real, dimension(:), optional, intent(in) :: grid_lat !< Latitude [degreeN] - integer, intent(in), optional :: thread !< Thread - integer, intent(in), optional :: fset !< File set + integer, optional, intent(in) :: thread !< Thread number + integer, optional, intent(in) :: fset !< File set open_profile_file=-1 end function open_profile_file @@ -29,7 +29,7 @@ end function open_profile_file !> Write a profile subroutine write_profile(unit,profile) integer, intent(in) :: unit !< File unit - type(ocean_profile_type), intent(in) :: profile !< Profile + type(ocean_profile_type), intent(in) :: profile !< Profile to write return end subroutine write_profile diff --git a/config_src/infra/FMS1/MOM_coms_infra.F90 b/config_src/infra/FMS1/MOM_coms_infra.F90 new file mode 100644 index 0000000000..555b4df119 --- /dev/null +++ b/config_src/infra/FMS1/MOM_coms_infra.F90 @@ -0,0 +1,455 @@ +!> Thin interfaces to non-domain-oriented mpp communication subroutines +module MOM_coms_infra + +! This file is part of MOM6. See LICENSE.md for the license. + +use iso_fortran_env, only : int32, int64 + +use mpp_mod, only : mpp_pe, mpp_root_pe, mpp_npes, mpp_set_root_pe +use mpp_mod, only : mpp_set_current_pelist, mpp_get_current_pelist +use mpp_mod, only : mpp_broadcast, mpp_sync, mpp_sync_self, mpp_chksum +use mpp_mod, only : mpp_sum, mpp_max, mpp_min +use memutils_mod, only : print_memuse_stats +use fms_mod, only : fms_end, fms_init + +implicit none ; private + +public :: PE_here, root_PE, num_PEs, set_rootPE, Set_PElist, Get_PElist +public :: broadcast, sum_across_PEs, min_across_PEs, max_across_PEs +public :: field_chksum, MOM_infra_init, MOM_infra_end + +! This module provides interfaces to the non-domain-oriented communication +! subroutines. + +!> Communicate an array, string or scalar from one PE to others +interface broadcast + module procedure broadcast_char, broadcast_int32_0D, broadcast_int64_0D, broadcast_int1D + module procedure broadcast_real0D, broadcast_real1D, broadcast_real2D +end interface broadcast + +!> Compute a checksum for a field distributed over a PE list. If no PE list is +!! provided, then the current active PE list is used. +interface field_chksum + module procedure field_chksum_real_0d + module procedure field_chksum_real_1d + module procedure field_chksum_real_2d + module procedure field_chksum_real_3d + module procedure field_chksum_real_4d +end interface field_chksum + +!> Find the sum of field across PEs, and update PEs with the sums. 
+interface sum_across_PEs
+ module procedure sum_across_PEs_int4_0d
+ module procedure sum_across_PEs_int4_1d
+ module procedure sum_across_PEs_int8_0d
+ module procedure sum_across_PEs_int8_1d
+ module procedure sum_across_PEs_int8_2d
+ module procedure sum_across_PEs_real_0d
+ module procedure sum_across_PEs_real_1d
+ module procedure sum_across_PEs_real_2d
+end interface sum_across_PEs
+
+!> Find the maximum value of field across PEs, and update PEs with the values.
+interface max_across_PEs
+ module procedure max_across_PEs_int_0d
+ module procedure max_across_PEs_real_0d
+ module procedure max_across_PEs_real_1d
+end interface max_across_PEs
+
+!> Find the minimum value of field across PEs, and update PEs with the values.
+interface min_across_PEs
+ module procedure min_across_PEs_int_0d
+ module procedure min_across_PEs_real_0d
+ module procedure min_across_PEs_real_1d
+end interface min_across_PEs
+
+contains
+
+!> Return the ID of the PE for the current process.
+function PE_here() result(pe)
+ integer :: pe !< PE ID of the current process
+ pe = mpp_pe()
+end function PE_here
+
+!> Return the ID of the root PE for the PE list of the current process.
+function root_PE() result(pe)
+ integer :: pe !< root PE ID
+ pe = mpp_root_pe()
+end function root_PE
+
+!> Return the number of PEs for the current PE list.
+function num_PEs() result(npes)
+ integer :: npes !< Number of PEs
+ npes = mpp_npes()
+end function num_PEs
+
+!> Designate a PE as the root PE
+subroutine set_rootPE(pe)
+ integer, intent(in) :: pe !< ID of the PE to be assigned as root
+ call mpp_set_root_pe(pe)
+end subroutine
+
+!> Set the current PE list. If no list is provided, then the current PE list
+!! is set to the list of all available PEs on the communicator. Setting the
+!! list will trigger a rank synchronization unless the `no_sync` flag is set.
+subroutine Set_PEList(pelist, no_sync)
+ integer, optional, intent(in) :: pelist(:) !< List of PEs to set for communication
+ logical, optional, intent(in) :: no_sync !< Do not sync after list update.
+ call mpp_set_current_pelist(pelist, no_sync)
+end subroutine Set_PEList
+
+!> Retrieve the current PE list and any metadata if requested.
+subroutine Get_PEList(pelist, name, commID)
+ integer, intent(out) :: pelist(:) !< List of PE IDs of the current PE list
+ character(len=*), optional, intent(out) :: name !< Name of PE list
+ integer, optional, intent(out) :: commID !< Communicator ID of PE list
+
+ call mpp_get_current_pelist(pelist, name, commiD)
+end subroutine Get_PEList
+
+!> Communicate a 1-D array of character strings from one PE to others
+subroutine broadcast_char(dat, length, from_PE, PElist, blocking)
+ character(len=*), intent(inout) :: dat(:) !< The data to communicate and destination
+ integer, intent(in) :: length !< The length of each string
+ integer, optional, intent(in) :: from_PE !< The source PE, by default the root PE
+ integer, optional, intent(in) :: PElist(:) !< The list of participating PEs, by default the
+ !! active PE set as previously set via Set_PElist.
+ logical, optional, intent(in) :: blocking !< If true, barriers are added around the call
+
+ integer :: src_PE ! The processor that is sending the data
+ logical :: do_block ! If true add synchronizing barriers
+
+ do_block = .false.
; if (present(blocking)) do_block = blocking + if (present(from_PE)) then ; src_PE = from_PE ; else ; src_PE = root_PE() ; endif + + if (do_block) call mpp_sync(PElist) + call mpp_broadcast(dat, length, src_PE, PElist) + if (do_block) call mpp_sync_self(PElist) + +end subroutine broadcast_char + +!> Communicate an integer from one PE to others +subroutine broadcast_int64_0D(dat, from_PE, PElist, blocking) + integer(kind=int64), intent(inout) :: dat !< The data to communicate and destination + integer, optional, intent(in) :: from_PE !< The source PE, by default the root PE + integer, optional, intent(in) :: PElist(:) !< The list of participating PEs, by default the + !! active PE set as previously set via Set_PElist. + logical, optional, intent(in) :: blocking !< If true, barriers are added around the call + + integer :: src_PE ! The processor that is sending the data + logical :: do_block ! If true add synchronizing barriers + + do_block = .false. ; if (present(blocking)) do_block = blocking + if (present(from_PE)) then ; src_PE = from_PE ; else ; src_PE = root_PE() ; endif + + if (do_block) call mpp_sync(PElist) + call mpp_broadcast(dat, src_PE, PElist) + if (do_block) call mpp_sync_self(PElist) + +end subroutine broadcast_int64_0D + + +!> Communicate an integer from one PE to others +subroutine broadcast_int32_0D(dat, from_PE, PElist, blocking) + integer(kind=int32), intent(inout) :: dat !< The data to communicate and destination + integer, optional, intent(in) :: from_PE !< The source PE, by default the root PE + integer, optional, intent(in) :: PElist(:) !< The list of participating PEs, by default the + !! active PE set as previously set via Set_PElist. + logical, optional, intent(in) :: blocking !< If true, barriers are added around the call + + integer :: src_PE ! The processor that is sending the data + logical :: do_block ! If true add synchronizing barriers + + do_block = .false. ; if (present(blocking)) do_block = blocking + if (present(from_PE)) then ; src_PE = from_PE ; else ; src_PE = root_PE() ; endif + + if (do_block) call mpp_sync(PElist) + call mpp_broadcast(dat, src_PE, PElist) + if (do_block) call mpp_sync_self(PElist) + +end subroutine broadcast_int32_0D + +!> Communicate a 1-D array of integers from one PE to others +subroutine broadcast_int1D(dat, length, from_PE, PElist, blocking) + integer, dimension(:), intent(inout) :: dat !< The data to communicate and destination + integer, intent(in) :: length !< The number of data elements + integer, optional, intent(in) :: from_PE !< The source PE, by default the root PE + integer, optional, intent(in) :: PElist(:) !< The list of participating PEs, by default the + !! active PE set as previously set via Set_PElist. + logical, optional, intent(in) :: blocking !< If true, barriers are added around the call + + integer :: src_PE ! The processor that is sending the data + logical :: do_block ! If true add synchronizing barriers + + do_block = .false. 
; if (present(blocking)) do_block = blocking + if (present(from_PE)) then ; src_PE = from_PE ; else ; src_PE = root_PE() ; endif + + if (do_block) call mpp_sync(PElist) + call mpp_broadcast(dat, length, src_PE, PElist) + if (do_block) call mpp_sync_self(PElist) + +end subroutine broadcast_int1D + +!> Communicate a real number from one PE to others +subroutine broadcast_real0D(dat, from_PE, PElist, blocking) + real, intent(inout) :: dat !< The data to communicate and destination + integer, optional, intent(in) :: from_PE !< The source PE, by default the root PE + integer, optional, intent(in) :: PElist(:) !< The list of participating PEs, by default the + !! active PE set as previously set via Set_PElist. + logical, optional, intent(in) :: blocking !< If true, barriers are added around the call + + integer :: src_PE ! The processor that is sending the data + logical :: do_block ! If true add synchronizing barriers + + do_block = .false. ; if (present(blocking)) do_block = blocking + if (present(from_PE)) then ; src_PE = from_PE ; else ; src_PE = root_PE() ; endif + + if (do_block) call mpp_sync(PElist) + call mpp_broadcast(dat, src_PE, PElist) + if (do_block) call mpp_sync_self(PElist) + +end subroutine broadcast_real0D + +!> Communicate a 1-D array of reals from one PE to others +subroutine broadcast_real1D(dat, length, from_PE, PElist, blocking) + real, dimension(:), intent(inout) :: dat !< The data to communicate and destination + integer, intent(in) :: length !< The number of data elements + integer, optional, intent(in) :: from_PE !< The source PE, by default the root PE + integer, optional, intent(in) :: PElist(:) !< The list of participating PEs, by default the + !! active PE set as previously set via Set_PElist. + logical, optional, intent(in) :: blocking !< If true, barriers are added around the call + + integer :: src_PE ! The processor that is sending the data + logical :: do_block ! If true add synchronizing barriers + + do_block = .false. ; if (present(blocking)) do_block = blocking + if (present(from_PE)) then ; src_PE = from_PE ; else ; src_PE = root_PE() ; endif + + if (do_block) call mpp_sync(PElist) + call mpp_broadcast(dat, length, src_PE, PElist) + if (do_block) call mpp_sync_self(PElist) + +end subroutine broadcast_real1D + +!> Communicate a 2-D array of reals from one PE to others +subroutine broadcast_real2D(dat, length, from_PE, PElist, blocking) + real, dimension(:,:), intent(inout) :: dat !< The data to communicate and destination + integer, intent(in) :: length !< The total number of data elements + integer, optional, intent(in) :: from_PE !< The source PE, by default the root PE + integer, optional, intent(in) :: PElist(:) !< The list of participating PEs, by default the + !! active PE set as previously set via Set_PElist. + logical, optional, intent(in) :: blocking !< If true, barriers are added around the call + + integer :: src_PE ! The processor that is sending the data + logical :: do_block ! If true add synchronizing barriers + + do_block = .false. ; if (present(blocking)) do_block = blocking + if (present(from_PE)) then ; src_PE = from_PE ; else ; src_PE = root_PE() ; endif + + if (do_block) call mpp_sync(PElist) + call mpp_broadcast(dat, length, src_PE, PElist) + if (do_block) call mpp_sync_self(PElist) + +end subroutine broadcast_real2D + +! field_chksum wrappers + +!> Compute a checksum for a field distributed over a PE list. If no PE list is +!! provided, then the current active PE list is used. 
+function field_chksum_real_0d(field, pelist, mask_val) result(chksum) + real, intent(in) :: field !< Input scalar + integer, optional, intent(in) :: pelist(:) !< PE list of ranks to checksum + real, optional, intent(in) :: mask_val !< FMS mask value + integer(kind=int64) :: chksum !< checksum of array + + chksum = mpp_chksum(field, pelist, mask_val) +end function field_chksum_real_0d + +!> Compute a checksum for a field distributed over a PE list. If no PE list is +!! provided, then the current active PE list is used. +function field_chksum_real_1d(field, pelist, mask_val) result(chksum) + real, dimension(:), intent(in) :: field !< Input array + integer, optional, intent(in) :: pelist(:) !< PE list of ranks to checksum + real, optional, intent(in) :: mask_val !< FMS mask value + integer(kind=int64) :: chksum !< checksum of array + + chksum = mpp_chksum(field, pelist, mask_val) +end function field_chksum_real_1d + +!> Compute a checksum for a field distributed over a PE list. If no PE list is +!! provided, then the current active PE list is used. +function field_chksum_real_2d(field, pelist, mask_val) result(chksum) + real, dimension(:,:), intent(in) :: field !< Unrotated input field + integer, optional, intent(in) :: pelist(:) !< PE list of ranks to checksum + real, optional, intent(in) :: mask_val !< FMS mask value + integer(kind=int64) :: chksum !< checksum of array + + chksum = mpp_chksum(field, pelist, mask_val) +end function field_chksum_real_2d + +!> Compute a checksum for a field distributed over a PE list. If no PE list is +!! provided, then the current active PE list is used. +function field_chksum_real_3d(field, pelist, mask_val) result(chksum) + real, dimension(:,:,:), intent(in) :: field !< Unrotated input field + integer, optional, intent(in) :: pelist(:) !< PE list of ranks to checksum + real, optional, intent(in) :: mask_val !< FMS mask value + integer(kind=int64) :: chksum !< checksum of array + + chksum = mpp_chksum(field, pelist, mask_val) +end function field_chksum_real_3d + +!> Compute a checksum for a field distributed over a PE list. If no PE list is +!! provided, then the current active PE list is used. +function field_chksum_real_4d(field, pelist, mask_val) result(chksum) + real, dimension(:,:,:,:), intent(in) :: field !< Unrotated input field + integer, optional, intent(in) :: pelist(:) !< PE list of ranks to checksum + real, optional, intent(in) :: mask_val !< FMS mask value + integer(kind=int64) :: chksum !< checksum of array + + chksum = mpp_chksum(field, pelist, mask_val) +end function field_chksum_real_4d + +! sum_across_PEs wrappers + +!> Find the sum of field across PEs, and return this sum in field. +subroutine sum_across_PEs_int4_0d(field, pelist) + integer(kind=int32), intent(inout) :: field !< Value on this PE, and the sum across PEs upon return + integer, optional, intent(in) :: pelist(:) !< List of PEs to work with + + call mpp_sum(field, pelist) +end subroutine sum_across_PEs_int4_0d + +!> Find the sum of the values in corresponding positions of field across PEs, and return these sums in field. +subroutine sum_across_PEs_int4_1d(field, length, pelist) + integer(kind=int32), dimension(:), intent(inout) :: field !< The values to add, the sums upon return + integer, intent(in) :: length !< Number of elements in field to add + integer, optional, intent(in) :: pelist(:) !< List of PEs to work with + + call mpp_sum(field, length, pelist) +end subroutine sum_across_PEs_int4_1d + +!> Find the sum of field across PEs, and return this sum in field. 
+subroutine sum_across_PEs_int8_0d(field, pelist)
+ integer(kind=int64), intent(inout) :: field !< Value on this PE, and the sum across PEs upon return
+ integer, optional, intent(in) :: pelist(:) !< List of PEs to work with
+
+ call mpp_sum(field, pelist)
+end subroutine sum_across_PEs_int8_0d
+
+!> Find the sum of the values in corresponding positions of field across PEs, and return these sums in field.
+subroutine sum_across_PEs_int8_1d(field, length, pelist)
+ integer(kind=int64), dimension(:), intent(inout) :: field !< The values to add, the sums upon return
+ integer, intent(in) :: length !< Number of elements in field to add
+ integer, optional, intent(in) :: pelist(:) !< List of PEs to work with
+
+ call mpp_sum(field, length, pelist)
+end subroutine sum_across_PEs_int8_1d
+
+!> Find the sum of the values in corresponding positions of field across PEs, and return these sums in field.
+subroutine sum_across_PEs_int8_2d(field, length, pelist)
+ integer(kind=int64), &
+ dimension(:,:), intent(inout) :: field !< The values to add, the sums upon return
+ integer, intent(in) :: length !< The total number of positions to sum, usually
+ !! the product of the array sizes.
+ integer, optional, intent(in) :: pelist(:) !< List of PEs to work with
+
+ call mpp_sum(field, length, pelist)
+end subroutine sum_across_PEs_int8_2d
+
+!> Find the sum of field across PEs, and return this sum in field.
+subroutine sum_across_PEs_real_0d(field, pelist)
+ real, intent(inout) :: field !< Value on this PE, and the sum across PEs upon return
+ integer, optional, intent(in) :: pelist(:) !< List of PEs to work with
+
+ call mpp_sum(field, pelist)
+end subroutine sum_across_PEs_real_0d
+
+!> Find the sum of the values in corresponding positions of field across PEs, and return these sums in field.
+subroutine sum_across_PEs_real_1d(field, length, pelist)
+ real, dimension(:), intent(inout) :: field !< The values to add, the sums upon return
+ integer, intent(in) :: length !< Number of elements in field to add
+ integer, optional, intent(in) :: pelist(:) !< List of PEs to work with
+
+ call mpp_sum(field, length, pelist)
+end subroutine sum_across_PEs_real_1d
+
+!> Find the sum of the values in corresponding positions of field across PEs, and return these sums in field.
+subroutine sum_across_PEs_real_2d(field, length, pelist)
+ real, dimension(:,:), intent(inout) :: field !< The values to add, the sums upon return
+ integer, intent(in) :: length !< The total number of positions to sum, usually
+ !! the product of the array sizes.
+ integer, optional, intent(in) :: pelist(:) !< List of PEs to work with
+
+ call mpp_sum(field, length, pelist)
+end subroutine sum_across_PEs_real_2d
+
+! max_across_PEs wrappers
+
+!> Find the maximum value of field across PEs, and store this maximum in field.
+subroutine max_across_PEs_int_0d(field, pelist)
+ integer, intent(inout) :: field !< The values to compare, the maximum upon return
+ integer, optional, intent(in) :: pelist(:) !< List of PEs to work with
+
+ call mpp_max(field, pelist)
+end subroutine max_across_PEs_int_0d
+
+!> Find the maximum value of field across PEs, and store this maximum in field.
+subroutine max_across_PEs_real_0d(field, pelist)
+ real, intent(inout) :: field !< The values to compare, the maximum upon return
+ integer, optional, intent(in) :: pelist(:) !< List of PEs to work with
+
+ call mpp_max(field, pelist)
+end subroutine max_across_PEs_real_0d
+
+!> Find the maximum values in each position of field across PEs, and store these maxima in field.
+subroutine max_across_PEs_real_1d(field, length, pelist) + real, dimension(:), intent(inout) :: field !< The list of values being compared, with the + !! maxima in each position upon return + integer, intent(in) :: length !< Number of elements in field to compare + integer, optional, intent(in) :: pelist(:) !< List of PEs to work with + + call mpp_max(field, length, pelist) +end subroutine max_across_PEs_real_1d + +! min_across_PEs wrappers + +!> Find the minimum value of field across PEs, and store this minimum in field. +subroutine min_across_PEs_int_0d(field, pelist) + integer, intent(inout) :: field !< The values to compare, the minimum upon return + integer, optional, intent(in) :: pelist(:) !< List of PEs to work with + + call mpp_min(field, pelist) +end subroutine min_across_PEs_int_0d + +!> Find the minimum value of field across PEs, and store this minimum in field. +subroutine min_across_PEs_real_0d(field, pelist) + real, intent(inout) :: field !< The values to compare, the minimum upon return + integer, optional, intent(in) :: pelist(:) !< List of PEs to work with + call mpp_min(field, pelist) +end subroutine min_across_PEs_real_0d + +!> Find the minimum values in each position of field across PEs, and store these minima in field. +subroutine min_across_PEs_real_1d(field, length, pelist) + real, dimension(:), intent(inout) :: field !< The list of values being compared, with the + !! minima in each position upon return + integer, intent(in) :: length !< Number of elements in field to compare + integer, optional, intent(in) :: pelist(:) !< List of PEs to work with + + call mpp_min(field, length, pelist) +end subroutine min_across_PEs_real_1d + +!> Initialize the model framework, including PE communication over a designated communicator. +!! If no communicator ID is provided, the framework's default communicator is used. +subroutine MOM_infra_init(localcomm) + integer, optional, intent(in) :: localcomm !< Communicator ID to initialize + call fms_init(localcomm) +end subroutine + +!> This subroutine carries out all of the calls required to close out the infrastructure cleanly. +!! This should only be called in ocean-only runs, as the coupler takes care of this in coupled runs. +subroutine MOM_infra_end + call print_memuse_stats( 'Memory HiWaterMark', always=.TRUE. ) + call fms_end() +end subroutine MOM_infra_end + +end module MOM_coms_infra diff --git a/src/framework/MOM_constants.F90 b/config_src/infra/FMS1/MOM_constants.F90 similarity index 100% rename from src/framework/MOM_constants.F90 rename to config_src/infra/FMS1/MOM_constants.F90 diff --git a/config_src/infra/FMS1/MOM_couplertype_infra.F90 b/config_src/infra/FMS1/MOM_couplertype_infra.F90 new file mode 100644 index 0000000000..fd947691ca --- /dev/null +++ b/config_src/infra/FMS1/MOM_couplertype_infra.F90 @@ -0,0 +1,247 @@ +!> This module wraps the FMS coupler types module +module MOM_couplertype_infra + +! This file is part of MOM6. See LICENSE.md for the license. 
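A usage sketch for the MOM_coms_infra interfaces added above; the driver name, the array and the values here are hypothetical, and only broadcast, sum_across_PEs, max_across_PEs, PE_here, root_PE, MOM_infra_init and MOM_infra_end are taken from the new module:

  program coms_infra_usage
    ! Hypothetical driver exercising the thin mpp wrappers defined in MOM_coms_infra above.
    use MOM_coms_infra, only : MOM_infra_init, MOM_infra_end, PE_here, root_PE
    use MOM_coms_infra, only : broadcast, sum_across_PEs, max_across_PEs
    implicit none
    real    :: depth(8)   ! Work array, filled on the root PE and broadcast to the others
    real    :: depth_max  ! A per-PE value reduced to a global maximum
    integer :: n_pts      ! A per-PE count summed over all PEs

    call MOM_infra_init()                 ! Start the FMS/MPI communication layer
    depth(:) = 0.0
    if (PE_here() == root_PE()) depth(:) = 4000.0
    call broadcast(depth, size(depth))    ! The root PE's values reach every PE
    n_pts = size(depth)
    call sum_across_PEs(n_pts)            ! n_pts becomes the total count over all PEs
    depth_max = maxval(depth)
    call max_across_PEs(depth_max)        ! depth_max becomes the global maximum
    call MOM_infra_end()
  end program coms_infra_usage

Because the generic interfaces resolve on type, kind and rank, the same call forms cover the scalar, 1-D and 2-D specific routines listed in the interface blocks.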
+
+use coupler_types_mod, only : coupler_type_spawn, coupler_type_initialized, coupler_type_destructor
+use coupler_types_mod, only : coupler_type_set_diags, coupler_type_send_data
+use coupler_types_mod, only : coupler_type_write_chksums
+use coupler_types_mod, only : coupler_type_copy_data, coupler_type_increment_data
+use coupler_types_mod, only : coupler_type_extract_data, coupler_type_set_data
+use coupler_types_mod, only : ind_flux, ind_alpha, ind_csurf
+use coupler_types_mod, only : coupler_1d_bc_type, coupler_2d_bc_type
+use atmos_ocean_fluxes_mod, only : aof_set_coupler_flux
+use MOM_time_manager, only : time_type
+
+implicit none ; private
+
+public :: CT_spawn, CT_initialized, CT_destructor
+public :: CT_set_diags, CT_send_data, CT_write_chksums
+public :: CT_set_data, CT_increment_data
+public :: CT_copy_data, CT_extract_data
+public :: atmos_ocn_coupler_flux
+public :: ind_flux, ind_alpha, ind_csurf
+public :: coupler_1d_bc_type, coupler_2d_bc_type
+
+!> This is the interface to spawn one coupler_bc_type into another.
+interface CT_spawn
+ module procedure CT_spawn_1d_2d, CT_spawn_2d_2d
+end interface CT_spawn
+
+!> This function interface indicates whether a coupler_bc_type has been initialized.
+interface CT_initialized
+ module procedure CT_initialized_1d, CT_initialized_2d
+end interface CT_initialized
+
+!> This is the interface to deallocate any data associated with a coupler_bc_type.
+interface CT_destructor
+ module procedure CT_destructor_1d, CT_destructor_2d
+end interface CT_destructor
+
+contains
+
+!> This function sets many of the parameters for calculating an atmosphere-ocean tracer flux
+!! and returns an integer index for that flux.
+function atmos_ocn_coupler_flux(name, flux_type, implementation, param, mol_wt, &
+ ice_restart_file, ocean_restart_file, units, caller, verbosity) &
+ result (coupler_index)
+
+ character(len=*), intent(in) :: name !< A name to use for the flux
+ character(len=*), intent(in) :: flux_type !< A string describing the type of this flux,
+ !! perhaps 'air_sea_gas_flux'.
+ character(len=*), intent(in) :: implementation !< A name describing the specific
+ !! implementation of this flux, such as 'ocmip2'.
+ real, dimension(:), optional, intent(in) :: param !< An array of parameters used for the fluxes
+ real, optional, intent(in) :: mol_wt !< The molecular weight of this tracer
+ character(len=*), optional, intent(in) :: ice_restart_file !< A sea-ice restart file to use with this flux.
+ character(len=*), optional, intent(in) :: ocean_restart_file !< An ocean restart file to use with this flux.
+ character(len=*), optional, intent(in) :: units !< The units of the flux
+ character(len=*), optional, intent(in) :: caller !< The name of the calling routine
+ integer, optional, intent(in) :: verbosity !< A 0-9 integer indicating a level of verbosity.
+ integer :: coupler_index !< The resulting integer handle to use for this flux in subsequent calls.
+
+ coupler_index = aof_set_coupler_flux(name, flux_type, implementation, &
+ param=param, mol_wt=mol_wt, ice_restart_file=ice_restart_file, &
+ ocean_restart_file=ocean_restart_file, &
+ units=units, caller=caller, verbosity=verbosity)
+
+end function atmos_ocn_coupler_flux
+
+!> Generate a 2-D coupler type using a 1-D coupler type as a template.
+subroutine CT_spawn_1d_2d(var_in, var, idim, jdim, suffix, as_needed) + type(coupler_1d_bc_type), intent(in) :: var_in !< structure from which to copy information + type(coupler_2d_bc_type), intent(inout) :: var !< structure into which to copy information + integer, dimension(4), intent(in) :: idim !< The data and computational domain extents of + !! the first dimension in a non-decreasing list + integer, dimension(4), intent(in) :: jdim !< The data and computational domain extents of + !! the second dimension in a non-decreasing list + character(len=*), optional, intent(in) :: suffix !< optional suffix to make the name identifier unique + logical, optional, intent(in) :: as_needed !< Only do the spawn if the target type (var) + !! is not set and the parent type (var_in) is set. + + call coupler_type_spawn(var_in, var, idim, jdim, suffix=suffix, as_needed=as_needed) + +end subroutine CT_spawn_1d_2d + +!> Generate one 2-D coupler type using another 2-D coupler type as a template. +subroutine CT_spawn_2d_2d(var_in, var, idim, jdim, suffix, as_needed) + type(coupler_2d_bc_type), intent(in) :: var_in !< structure from which to copy information + type(coupler_2d_bc_type), intent(inout) :: var !< structure into which to copy information + integer, dimension(4), intent(in) :: idim !< The data and computational domain extents of + !! the first dimension in a non-decreasing list + integer, dimension(4), intent(in) :: jdim !< The data and computational domain extents of + !! the second dimension in a non-decreasing list + character(len=*), optional, intent(in) :: suffix !< optional suffix to make the name identifier unique + logical, optional, intent(in) :: as_needed !< Only do the spawn if the target type (var) + !! is not set and the parent type (var_in) is set. + + call coupler_type_spawn(var_in, var, idim, jdim, suffix=suffix, as_needed=as_needed) + +end subroutine CT_spawn_2d_2d + +!> Copy all elements of the data in of one coupler_2d_bc_type into another. Both must have the same array sizes. +subroutine CT_copy_data(var_in, var, halo_size, bc_index, field_index, & + exclude_flux_type, only_flux_type, pass_through_ice) + type(coupler_2d_bc_type), intent(in) :: var_in !< BC_type structure with the data to copy + type(coupler_2d_bc_type), intent(inout) :: var !< The recipient BC_type structure + integer, optional, intent(in) :: halo_size !< The extent of the halo to copy; 0 by default + integer, optional, intent(in) :: bc_index !< The index of the boundary condition + !! that is being copied + integer, optional, intent(in) :: field_index !< The index of the field in the + !! boundary condition that is being copied + character(len=*), optional, intent(in) :: exclude_flux_type !< A string describing which types of fluxes + !! to exclude from this copy. + character(len=*), optional, intent(in) :: only_flux_type !< A string describing which types of fluxes + !! to include from this copy. + logical, optional, intent(in) :: pass_through_ice !< If true, only copy BCs whose + !! value of pass_through ice matches this + + call coupler_type_copy_data(var_in, var, halo_size, bc_index, field_index, & + exclude_flux_type, only_flux_type, pass_through_ice) +end subroutine CT_copy_data + +!> Increment data in all elements of one coupler_2d_bc_type with the data from another. Both +!! must have the same array sizes. 
+subroutine CT_increment_data(var_in, var, halo_size, scale_factor, scale_prev) + type(coupler_2d_bc_type), intent(in) :: var_in !< coupler_type structure with the data to add to the other type + type(coupler_2d_bc_type), intent(inout) :: var !< The coupler_type structure whose fields are being incremented + integer, optional, intent(in) :: halo_size !< The extent of the halo to increment; 0 by default + real, optional, intent(in) :: scale_factor !< A scaling factor for the data that is being added + real, optional, intent(in) :: scale_prev !< A scaling factor for the data that is already here + + call coupler_type_increment_data(var_in, var, halo_size=halo_size, scale_factor=scale_factor, & + scale_prev=scale_prev) + +end subroutine CT_increment_data + +!> Extract a 2d field from a coupler_2d_bc_type into a two-dimensional array. +subroutine CT_extract_data(var_in, bc_index, field_index, array_out, & + scale_factor, halo_size, idim, jdim) + type(coupler_2d_bc_type), intent(in) :: var_in !< BC_type structure with the data to extract + integer, intent(in) :: bc_index !< The index of the boundary condition + !! that is being copied + integer, intent(in) :: field_index !< The index of the field in the boundary + !! condition that is being copied, or the + !! surface flux by default. + real, dimension(1:,1:), intent(out) :: array_out !< The recipient array for the field; its size + !! must match the size of the data being copied + !! unless idim and jdim are supplied. + real, optional, intent(in) :: scale_factor !< A scaling factor for the data that is being added + integer, optional, intent(in) :: halo_size !< The extent of the halo to copy; 0 by default + integer, dimension(4), optional, intent(in) :: idim !< The data and computational domain extents of + !! the first dimension of the output array + !! in a non-decreasing list + integer, dimension(4), optional, intent(in) :: jdim !< The data and computational domain extents of + !! the second dimension of the output array + !! in a non-decreasing list + call coupler_type_extract_data(var_in, bc_index, field_index, array_out, scale_factor, halo_size, idim, jdim) + +end subroutine CT_extract_data + +!> Set single 2d field in coupler_2d_bc_type from a two-dimensional array. +subroutine CT_set_data(array_in, bc_index, field_index, var, & + scale_factor, halo_size, idim, jdim) + real, dimension(1:,1:), intent(in) :: array_in !< The source array for the field; its size + !! must match the size of the data being copied + !! unless idim and jdim are supplied. + integer, intent(in) :: bc_index !< The index of the boundary condition + !! that is being copied + integer, intent(in) :: field_index !< The index of the field in the + !! boundary condition that is being set. The + !! surface concentration is set by default. + type(coupler_2d_bc_type), intent(inout) :: var !< BC_type structure with the data to set + real, optional, intent(in) :: scale_factor !< A scaling factor for the data that is being added + integer, optional, intent(in) :: halo_size !< The extent of the halo to copy; 0 by default + integer, dimension(4), optional, intent(in) :: idim !< The data and computational domain extents of + !! the first dimension of the output array + !! in a non-decreasing list + integer, dimension(4), optional, intent(in) :: jdim !< The data and computational domain extents of + !! the second dimension of the output array + !! in a non-decreasing list + + integer :: subfield ! An integer indicating which field to set. 
+
+  call coupler_type_set_data(array_in, bc_index, field_index, var, scale_factor, halo_size, idim, jdim)
+
+end subroutine CT_set_data
+
+!> Register the diagnostics of a coupler_2d_bc_type
+subroutine CT_set_diags(var, diag_name, axes, time)
+  type(coupler_2d_bc_type), intent(inout) :: var  !< BC_type structure for which to register diagnostics
+  character(len=*),         intent(in)    :: diag_name !< name for diagnostic file, or blank not to register the fields
+  integer, dimension(:),    intent(in)    :: axes !< array of axes identifiers for diagnostic variable registration
+  type(time_type),          intent(in)    :: time !< model time variable for registering diagnostic field
+
+  call coupler_type_set_diags(var, diag_name, axes, time)
+
+end subroutine CT_set_diags
+
+!> Write out all diagnostics of elements of a coupler_2d_bc_type
+subroutine CT_send_data(var, Time)
+  type(coupler_2d_bc_type), intent(in) :: var  !< BC_type structure with the diagnostics to write
+  type(time_type),          intent(in) :: Time !< The current model time
+
+  call coupler_type_send_data(var, Time)
+end subroutine CT_send_data
+
+!> Write out checksums for the elements of a coupler_2d_bc_type
+subroutine CT_write_chksums(var, outunit, name_lead)
+  type(coupler_2d_bc_type),   intent(in) :: var !< BC_type structure whose elements are to be checksummed
+  integer,                    intent(in) :: outunit !< The unit number of an open output file
+  character(len=*), optional, intent(in) :: name_lead !< An optional prefix for the variable names
+
+  call coupler_type_write_chksums(var, outunit, name_lead)
+
+end subroutine CT_write_chksums
+
+!> Indicate whether a coupler_1d_bc_type has been initialized.
+logical function CT_initialized_1d(var)
+  type(coupler_1d_bc_type), intent(in) :: var !< BC_type structure to be queried
+
+  CT_initialized_1d = coupler_type_initialized(var)
+end function CT_initialized_1d
+
+!> Indicate whether a coupler_2d_bc_type has been initialized.
+logical function CT_initialized_2d(var)
+  type(coupler_2d_bc_type), intent(in) :: var !< BC_type structure to be queried
+
+  CT_initialized_2d = coupler_type_initialized(var)
+end function CT_initialized_2d
+
+!> Deallocate all data associated with a coupler_1d_bc_type
+subroutine CT_destructor_1d(var)
+  type(coupler_1d_bc_type), intent(inout) :: var !< BC_type structure to be deconstructed
+
+  call coupler_type_destructor(var)
+
+end subroutine CT_destructor_1d
+
+!> Deallocate all data associated with a coupler_2d_bc_type
+subroutine CT_destructor_2d(var)
+  type(coupler_2d_bc_type), intent(inout) :: var !< BC_type structure to be deconstructed
+
+  call coupler_type_destructor(var)
+
+end subroutine CT_destructor_2d
+
+end module MOM_couplertype_infra
diff --git a/config_src/infra/FMS1/MOM_cpu_clock_infra.F90 b/config_src/infra/FMS1/MOM_cpu_clock_infra.F90
new file mode 100644
index 0000000000..47d7bbedaa
--- /dev/null
+++ b/config_src/infra/FMS1/MOM_cpu_clock_infra.F90
@@ -0,0 +1,93 @@
+!> Wraps the MPP cpu clock functions
+!!
+!! The functions and constants should be accessed via mom_cpu_clock
+module MOM_cpu_clock_infra
+
+! This file is part of MOM6. See LICENSE.md for the license.
+
+! These interfaces and constants from MPP/FMS will not be directly exposed outside of this module
+use fms_mod, only : clock_flag_default
+use mpp_mod, only : mpp_clock_begin
+use mpp_mod, only : mpp_clock_end, mpp_clock_id
+use mpp_mod, only : MPP_CLOCK_COMPONENT => CLOCK_COMPONENT
+use mpp_mod, only : MPP_CLOCK_SUBCOMPONENT => CLOCK_SUBCOMPONENT
+use mpp_mod, only : MPP_CLOCK_MODULE_DRIVER => CLOCK_MODULE_DRIVER
+use mpp_mod, only : MPP_CLOCK_MODULE => CLOCK_MODULE
+use mpp_mod, only : MPP_CLOCK_ROUTINE => CLOCK_ROUTINE
+use mpp_mod, only : MPP_CLOCK_LOOP => CLOCK_LOOP
+use mpp_mod, only : MPP_CLOCK_INFRA => CLOCK_INFRA
+
+implicit none ; private
+
+! Public entities
+public :: cpu_clock_id, cpu_clock_begin, cpu_clock_end
+public :: CLOCK_COMPONENT, CLOCK_SUBCOMPONENT, CLOCK_MODULE_DRIVER, CLOCK_MODULE
+public :: CLOCK_ROUTINE, CLOCK_LOOP, CLOCK_INFRA
+
+!> A granularity value to be passed to cpu_clock_id() to indicate the clock is for a
+!! component, e.g. the entire MOM6 model
+integer, parameter :: CLOCK_COMPONENT = MPP_CLOCK_COMPONENT
+
+!> A granularity value to be passed to cpu_clock_id() to indicate the clock is for a
+!! sub-component, e.g. dynamics or thermodynamics
+integer, parameter :: CLOCK_SUBCOMPONENT = MPP_CLOCK_SUBCOMPONENT
+
+!> A granularity value to be passed to cpu_clock_id() to indicate the clock is for a
+!! module driver, e.g. a routine that calls multiple other routines
+integer, parameter :: CLOCK_MODULE_DRIVER = MPP_CLOCK_MODULE_DRIVER
+
+!> A granularity value to be passed to cpu_clock_id() to indicate the clock is for a
+!! module, e.g. the main entry routine for a module
+integer, parameter :: CLOCK_MODULE = MPP_CLOCK_MODULE
+
+!> A granularity value to be passed to cpu_clock_id() to indicate the clock is for a
+!! subroutine or function
+integer, parameter :: CLOCK_ROUTINE = MPP_CLOCK_ROUTINE
+
+!> A granularity value to be passed to cpu_clock_id() to indicate the clock is for a
+!! section within a routine, e.g. around a loop
+integer, parameter :: CLOCK_LOOP = MPP_CLOCK_LOOP
+
+!> A granularity value to be passed to cpu_clock_id() to indicate the clock is for an
+!! infrastructure operation, e.g. a halo update
+integer, parameter :: CLOCK_INFRA = MPP_CLOCK_INFRA
+
+contains
+
+!> Turns on the clock with handle "id"
+subroutine cpu_clock_begin(id)
+  integer, intent(in) :: id !< Handle for the clock
+
+  call mpp_clock_begin(id)
+
+end subroutine cpu_clock_begin
+
+!> Turns off the clock with handle "id"
+subroutine cpu_clock_end(id)
+  integer, intent(in) :: id !< Handle for the clock
+
+  call mpp_clock_end(id)
+
+end subroutine cpu_clock_end
+
+!> Returns the integer handle for a named CPU clock.
+integer function cpu_clock_id( name, synchro_flag, grain )
+  character(len=*),  intent(in) :: name !< The unique name of the CPU clock
+  integer, optional, intent(in) :: synchro_flag !< An integer flag that controls whether the PEs
+                                        !! are synchronized before the cpu clocks start counting.
+                                        !! Synchronization occurs before the start of a clock if this
+                                        !! is odd, while additional (expensive) statistics can be set
+                                        !! for other values. If absent, the default is taken from the
+                                        !! settings for FMS.
+  integer, optional, intent(in) :: grain !< The timing granularity for this clock, usually set to
+                                        !! the values of CLOCK_COMPONENT, CLOCK_ROUTINE, CLOCK_LOOP, etc.
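+
+  ! A usage sketch (the clock name and variable below are illustrative only, not part of this module):
+  !   id_clock_dyn = cpu_clock_id('(Ocean dynamics)', grain=CLOCK_MODULE_DRIVER)
+  !   call cpu_clock_begin(id_clock_dyn)
+  !   ... timed code ...
+  !   call cpu_clock_end(id_clock_dyn)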
+ + if (present(synchro_flag)) then + cpu_clock_id = mpp_clock_id(name, flags=synchro_flag, grain=grain) + else + cpu_clock_id = mpp_clock_id(name, flags=clock_flag_default, grain=grain) + endif + +end function cpu_clock_id + +end module MOM_cpu_clock_infra diff --git a/config_src/infra/FMS1/MOM_data_override_infra.F90 b/config_src/infra/FMS1/MOM_data_override_infra.F90 new file mode 100644 index 0000000000..1484f0c128 --- /dev/null +++ b/config_src/infra/FMS1/MOM_data_override_infra.F90 @@ -0,0 +1,105 @@ +!> These interfaces allow for ocean or sea-ice variables to be replaced with data. +module MOM_data_override_infra + +! This file is part of MOM6. See LICENSE.md for the license. + +use MOM_domain_infra, only : MOM_domain_type, domain2d +use MOM_domain_infra, only : get_simple_array_i_ind, get_simple_array_j_ind +use MOM_time_manager, only : time_type +use data_override_mod, only : data_override_init +use data_override_mod, only : data_override +use data_override_mod, only : data_override_unset_domains + +implicit none ; private + +public :: impose_data_init, impose_data, impose_data_unset_domains + +!> Potentially override the values of a field in the model with values from a dataset. +interface impose_data + module procedure data_override_MD, data_override_2d +end interface + +contains + +!> Initialize the data override capability and set the domains for the ocean and ice components. +!> There should be a call to impose_data_init before impose_data is called. +subroutine impose_data_init(MOM_domain_in, Ocean_domain_in, Ice_domain_in) + type (MOM_domain_type), intent(in), optional :: MOM_domain_in + type (domain2d), intent(in), optional :: Ocean_domain_in + type (domain2d), intent(in), optional :: Ice_domain_in + + if (present(MOM_domain_in)) then + call data_override_init(Ocean_domain_in=MOM_domain_in%mpp_domain, Ice_domain_in=Ice_domain_in) + else + call data_override_init(Ocean_domain_in=Ocean_domain_in, Ice_domain_in=Ice_domain_in) + endif +end subroutine impose_data_init + + +!> Potentially override a 2-d field on a MOM6 domain with values from a dataset. +subroutine data_override_MD(domain, fieldname, data_2D, time, scale, override, is_ice) + type(MOM_domain_type), intent(in) :: domain !< MOM domain from which to extract information + character(len=*), intent(in) :: fieldname !< Name of the field to override + real, dimension(:,:), intent(inout) :: data_2D !< Data that may be modified by this call. + type(time_type), intent(in) :: time !< The model time, and the time for the data + real, optional, intent(in) :: scale !< A scaling factor that an overridden field is + !! multiplied by before it is returned. However, + !! if there is no override, there is no rescaling. + logical, optional, intent(out) :: override !< True if the field has been overridden successfully + logical, optional, intent(in) :: is_ice !< If present and true, use the ice domain. + + logical :: overridden, is_ocean + integer :: i, j, is, ie, js, je + + overridden = .false. + is_ocean = .true. ; if (present(is_ice)) is_ocean = .not.is_ice + if (is_ocean) then + call data_override('OCN', fieldname, data_2D, time, override=overridden) + else + call data_override('ICE', fieldname, data_2D, time, override=overridden) + endif + + if (overridden .and. present(scale)) then ; if (scale /= 1.0) then + ! Rescale data in the computational domain if the data override has occurred. 
+ call get_simple_array_i_ind(domain, size(data_2D,1), is, ie) + call get_simple_array_j_ind(domain, size(data_2D,2), js, je) + do j=js,je ; do i=is,ie + data_2D(i,j) = scale*data_2D(i,j) + enddo ; enddo + endif ; endif + + if (present(override)) override = overridden + +end subroutine data_override_MD + + +!> Potentially override a 2-d field with values from a dataset. +subroutine data_override_2d(gridname, fieldname, data_2D, time, override) + character(len=3), intent(in) :: gridname !< String identifying the model component, in MOM6 + !! and SIS this may be either 'OCN' or 'ICE' + character(len=*), intent(in) :: fieldname !< Name of the field to override + real, dimension(:,:), intent(inout) :: data_2D !< Data that may be modified by this call + type(time_type), intent(in) :: time !< The model time, and the time for the data + logical, optional, intent(out) :: override !< True if the field has been overridden successfully + + call data_override(gridname, fieldname, data_2D, time, override) + +end subroutine data_override_2d + +!> Unset domains that had previously been set for use by data_override. +subroutine impose_data_unset_domains(unset_Ocean, unset_Ice, must_be_set) + logical, intent(in), optional :: unset_Ocean !< If present and true, unset the ocean domain for overrides + logical, intent(in), optional :: unset_Ice !< If present and true, unset the sea-ice domain for overrides + logical, intent(in), optional :: must_be_set !< If present and true, it is a fatal error to unset + !! a domain that is not set. + + call data_override_unset_domains(unset_Ocean=unset_Ocean, unset_Ice=unset_Ice, & + must_be_set=must_be_set) +end subroutine impose_data_unset_domains + +end module MOM_data_override_infra + +!> \namespace MOM_data_override_infra +!! +!! The routines here wrap routines from the FMS module data_override_mod, which potentially replace +!! model values with values read from a data file. diff --git a/config_src/infra/FMS1/MOM_diag_manager_infra.F90 b/config_src/infra/FMS1/MOM_diag_manager_infra.F90 new file mode 100644 index 0000000000..18c80cf24c --- /dev/null +++ b/config_src/infra/FMS1/MOM_diag_manager_infra.F90 @@ -0,0 +1,423 @@ +!> A wrapper for the FMS diag_manager routines. This module should be the +!! only MOM6 module which imports the FMS shared infrastructure for +!! diagnostics. Pass through interfaces are being documented +!! here and renamed in order to clearly identify these APIs as being +!! consistent with the FMS infrastructure (Any future updates to +!! those APIs would be applied here). +module MOM_diag_manager_infra + +! This file is part of MOM6. See LICENSE.md for the license. 
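+
+! A usage sketch of these wrappers (all variable names here are illustrative, not part of this API):
+!   id_sst = register_diag_field_infra('ocean_model', 'SST', axes, Time, 'Sea surface temperature', 'degC')
+!   if (id_sst > 0) used = send_data_infra(id_sst, sst(:,:), time=Time)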
+
+use diag_axis_mod,    only : fms_axis_init=>diag_axis_init
+use diag_axis_mod,    only : fms_get_diag_axis_name => get_diag_axis_name
+use diag_axis_mod,    only : EAST, NORTH
+use diag_data_mod,    only : null_axis_id
+use diag_manager_mod, only : fms_diag_manager_init => diag_manager_init
+use diag_manager_mod, only : fms_diag_manager_end => diag_manager_end
+use diag_manager_mod, only : send_data_fms => send_data
+use diag_manager_mod, only : fms_diag_field_add_attribute => diag_field_add_attribute
+use diag_manager_mod, only : DIAG_FIELD_NOT_FOUND
+use diag_manager_mod, only : register_diag_field_fms => register_diag_field
+use diag_manager_mod, only : register_static_field_fms => register_static_field
+use diag_manager_mod, only : get_diag_field_id_fms => get_diag_field_id
+use MOM_time_manager, only : time_type
+use MOM_domain_infra, only : MOM_domain_type
+use MOM_error_infra,  only : MOM_error => MOM_err, FATAL, WARNING
+
+implicit none ; private
+
+!> Register a field for diagnostic output
+interface register_diag_field_infra
+  module procedure register_diag_field_infra_scalar
+  module procedure register_diag_field_infra_array
+end interface register_diag_field_infra
+
+!> transmit data for diagnostic output
+interface send_data_infra
+  module procedure send_data_infra_0d, send_data_infra_1d
+  module procedure send_data_infra_2d, send_data_infra_3d
+#ifdef OVERLOAD_R8
+  module procedure send_data_infra_2d_r8, send_data_infra_3d_r8
+#endif
+end interface send_data_infra
+
+!> Add an attribute to a diagnostic field
+interface MOM_diag_field_add_attribute
+  module procedure MOM_diag_field_add_attribute_scalar_r
+  module procedure MOM_diag_field_add_attribute_scalar_i
+  module procedure MOM_diag_field_add_attribute_scalar_c
+  module procedure MOM_diag_field_add_attribute_r1d
+  module procedure MOM_diag_field_add_attribute_i1d
+end interface MOM_diag_field_add_attribute
+
+
+! Public interfaces
+public MOM_diag_axis_init
+public get_MOM_diag_axis_name
+public MOM_diag_manager_init
+public MOM_diag_manager_end
+public send_data_infra
+public MOM_diag_field_add_attribute
+public register_diag_field_infra
+public register_static_field_infra
+public get_MOM_diag_field_id
+! Public data
+public null_axis_id
+public DIAG_FIELD_NOT_FOUND
+public EAST, NORTH
+
+
+contains
+
+!> Initialize a diagnostic axis
+integer function MOM_diag_axis_init(name, data, units, cart_name, long_name, MOM_domain, position, &
+                                    & direction, edges, set_name, coarsen, null_axis)
+  character(len=*),   intent(in) :: name      !< The name of this axis
+  real, dimension(:), intent(in) :: data      !< The array of coordinate values
+  character(len=*),   intent(in) :: units     !< The units for the axis data
+  character(len=*),   intent(in) :: cart_name !< Cartesian axis ("X", "Y", "Z", "T", or "N" for none)
+  character(len=*), &
+            optional, intent(in) :: long_name !< The long name of this axis
+  type(MOM_domain_type), &
+            optional, intent(in) :: MOM_Domain !< A MOM_Domain that describes the decomposition
+  integer,  optional, intent(in) :: position  !< This indicates the relative position of this
+                                              !! axis. The default is CENTER, but EAST and NORTH
+                                              !! are common options.
+  integer,  optional, intent(in) :: direction !< This indicates the direction along which this
+                                              !! axis increases: 1 for upward, -1 for downward, or
+                                              !! 0 for non-vertical axes (the default)
+  integer,  optional, intent(in) :: edges     !< The axis_id of the complementary axis that
+                                              !!
describes the edges of this axis + character(len=*), & + optional, intent(in) :: set_name !< A name to use for this set of axes. + integer, optional, intent(in) :: coarsen !< An optional degree of coarsening for the grid, 1 + !! by default. + logical, optional, intent(in) :: null_axis !< If present and true, return the special null axis + !! id for use with scalars. + + integer :: coarsening ! The degree of grid coarsening + + if (present(null_axis)) then ; if (null_axis) then + ! Return the special null axis id for scalars + MOM_diag_axis_init = null_axis_id + return + endif ; endif + + if (present(MOM_domain)) then + coarsening = 1 ; if (present(coarsen)) coarsening = coarsen + if (coarsening == 1) then + MOM_diag_axis_init = fms_axis_init(name, data, units, cart_name, long_name=long_name, & + direction=direction, set_name=set_name, edges=edges, & + domain2=MOM_domain%mpp_domain, domain_position=position) + elseif (coarsening == 2) then + MOM_diag_axis_init = fms_axis_init(name, data, units, cart_name, long_name=long_name, & + direction=direction, set_name=set_name, edges=edges, & + domain2=MOM_domain%mpp_domain_d2, domain_position=position) + else + call MOM_error(FATAL, "diag_axis_init called with an invalid value of coarsen.") + endif + else + if (present(coarsen)) then ; if (coarsen /= 1) then + call MOM_error(FATAL, "diag_axis_init does not support grid coarsening without a MOM_domain.") + endif ; endif + MOM_diag_axis_init = fms_axis_init(name, data, units, cart_name, long_name=long_name, & + direction=direction, set_name=set_name, edges=edges) + endif + +end function MOM_diag_axis_init + +!> Returns the short name of the axis +subroutine get_MOM_diag_axis_name(id, name) + integer, intent(in) :: id !< The axis numeric id + character(len=*), intent(out) :: name !< The short name of the axis + + call fms_get_diag_axis_name(id, name) + +end subroutine get_MOM_diag_axis_name + +!> Return a unique numeric ID field a module/field name combination. +integer function get_MOM_diag_field_id(module_name, field_name) + character(len=*), intent(in) :: module_name !< A module name string to query. + character(len=*), intent(in) :: field_name !< A field name string to query. + + + get_MOM_diag_field_id = -1 + get_MOM_diag_field_id = get_diag_field_id_fms(module_name, field_name) + +end function get_MOM_diag_field_id + +!> Initializes the diagnostic manager +subroutine MOM_diag_manager_init(diag_model_subset, time_init, err_msg) + integer, optional, intent(in) :: diag_model_subset !< An optional diagnostic subset + integer, dimension(6), optional, intent(in) :: time_init !< An optional reference time for diagnostics + !! The default uses the value contained in the + !! diag_table. Format is Y-M-D-H-M-S + character(len=*), optional, intent(out) :: err_msg !< Error message. + call FMS_diag_manager_init(diag_model_subset, time_init, err_msg) + +end subroutine MOM_diag_manager_init + +!> Close the diagnostic manager +subroutine MOM_diag_manager_end(time) + type(time_type), intent(in) :: time !< Model time at call to close. 
+
+  call FMS_diag_manager_end(time)
+
+end subroutine MOM_diag_manager_end
+
+!> Register a MOM diagnostic field for scalars
+integer function register_diag_field_infra_scalar(module_name, field_name, init_time, &
+              long_name, units, missing_value, range, standard_name, do_not_log, &
+              err_msg, area, volume)
+  character(len=*),           intent(in) :: module_name !< The name of the associated module
+  character(len=*),           intent(in) :: field_name !< The name of the field
+  type(time_type),  optional, intent(in) :: init_time !< The registration time
+  character(len=*), optional, intent(in) :: long_name !< A long name for the field
+  character(len=*), optional, intent(in) :: units !< Field units
+  character(len=*), optional, intent(in) :: standard_name !< A standard name for the field
+  real,             optional, intent(in) :: missing_value !< Missing value attribute
+  real, dimension(2), optional, intent(in) :: range !< A valid range of the field
+  logical,          optional, intent(in) :: do_not_log !< if TRUE, field information is not logged
+  character(len=*), optional, intent(out):: err_msg !< An error message to return
+  integer,          optional, intent(in) :: area !< Diagnostic ID of the field containing the area attribute
+  integer,          optional, intent(in) :: volume !< Diagnostic ID of the field containing the volume attribute
+
+  register_diag_field_infra_scalar = register_diag_field_fms(module_name, field_name, init_time, &
+       long_name, units, missing_value, range, standard_name, do_not_log, err_msg, area, volume)
+
+end function register_diag_field_infra_scalar
+
+!> Register a MOM diagnostic field for an array of values
+integer function register_diag_field_infra_array(module_name, field_name, axes, init_time, &
+              long_name, units, missing_value, range, mask_variant, standard_name, verbose, &
+              do_not_log, err_msg, interp_method, tile_count, area, volume)
+  character(len=*),           intent(in) :: module_name !< The name of the associated module
+  character(len=*),           intent(in) :: field_name !< The name of the field
+  integer, dimension(:),      intent(in) :: axes !< Diagnostic IDs of axis attributes for the field
+  type(time_type),  optional, intent(in) :: init_time !< The registration time
+  character(len=*), optional, intent(in) :: long_name !< A long name for the field
+  character(len=*), optional, intent(in) :: units !< Units of the field
+  real,             optional, intent(in) :: missing_value !< Missing value attribute
+  real, dimension(2), optional, intent(in) :: range !< A valid range of the field
+  logical,          optional, intent(in) :: mask_variant !< If true, the field mask is varying in time
+  character(len=*), optional, intent(in) :: standard_name !< A standard name for the field
+  logical,          optional, intent(in) :: verbose !< If true, provide additional log information
+  logical,          optional, intent(in) :: do_not_log !< if TRUE, field information is not logged
+  character(len=*), optional, intent(in) :: interp_method !< If 'none' indicates the field should
+                                              !!
not be interpolated as a scalar + integer, optional, intent(in) :: tile_count !< The tile number for the current PE + character(len=*), optional, intent(out):: err_msg !< An error message to return + integer, optional, intent(in) :: area !< Diagnostic ID of the field containing the area attribute + integer, optional, intent(in) :: volume !< Diagnostic ID of the field containing the volume attribute + + register_diag_field_infra_array = register_diag_field_fms(module_name, field_name, axes, init_time, & + long_name, units, missing_value, range, mask_variant, standard_name, verbose, do_not_log, & + err_msg, interp_method, tile_count, area, volume) + +end function register_diag_field_infra_array + + +integer function register_static_field_infra(module_name, field_name, axes, long_name, units, & + missing_value, range, mask_variant, standard_name, do_not_log, interp_method, & + tile_count, area, volume) + character(len=*), intent(in) :: module_name !< The name of the associated module + character(len=*), intent(in) :: field_name !< The name of the field + integer, dimension(:), intent(in) :: axes !< Diagnostic IDs of axis attributes for the field + character(len=*), optional, intent(in) :: long_name !< A long name for the field + character(len=*), optional, intent(in) :: units !< Units of the field + real, optional, intent(in) :: missing_value !< Missing value attribute + real, dimension(2), optional, intent(in) :: range !< A valid range of the field + logical, optional, intent(in) :: mask_variant !< If true, the field mask is varying in time + character(len=*), optional, intent(in) :: standard_name !< A standard name for the field + logical, optional, intent(in) :: do_not_log !< if TRUE, field information is not logged + character(len=*), optional, intent(in) :: interp_method !< If 'none' indicates the field should + !! not be interpolated as a scalar + integer, optional, intent(in) :: tile_count !< The tile number for the current PE + integer, optional, intent(in) :: area !< Diagnostic ID of the field containing the area attribute + integer, optional, intent(in) :: volume !< Diagnostic ID of the field containing the volume attribute + + register_static_field_infra = register_static_field_fms(module_name, field_name, axes, long_name, units,& + & missing_value, range, mask_variant, standard_name, dynamic=.false.,do_not_log=do_not_log, & + interp_method=interp_method,tile_count=tile_count, area=area, volume=volume) +end function register_static_field_infra + +!> Returns true if the argument data are successfully passed to a diagnostic manager +!! with the indicated unique reference id, false otherwise. +logical function send_data_infra_0d(diag_field_id, field, time, err_msg) + integer, intent(in) :: diag_field_id !< The diagnostic manager identifier for this field + real, intent(in) :: field !< The value being recorded + TYPE(time_type), optional, intent(in) :: time !< The time for the current record + CHARACTER(len=*), optional, intent(out) :: err_msg !< An optional error message + + send_data_infra_0d = send_data_fms(diag_field_id, field, time, err_msg) +end function send_data_infra_0d + +!> Returns true if the argument data are successfully passed to a diagnostic manager +!! with the indicated unique reference id, false otherwise. 
+logical function send_data_infra_1d(diag_field_id, field, is_in, ie_in, time, mask, rmask, weight, err_msg) + integer, intent(in) :: diag_field_id !< The diagnostic manager identifier for this field + real, dimension(:), intent(in) :: field !< A 1-d array of values being recorded + integer, optional, intent(in) :: is_in !< The starting index for the data being recorded + integer, optional, intent(in) :: ie_in !< The end index for the data being recorded + type(time_type), optional, intent(in) :: time !< The time for the current record + logical, dimension(:), optional, intent(in) :: mask !< An optional rank 1 logical mask + real, dimension(:), optional, intent(in) :: rmask !< An optional rank 1 mask array + real, optional, intent(in) :: weight !< A scalar weight factor to apply to the current + !! record if there is averaging in time + character(len=*), optional, intent(out) :: err_msg !< A log indicating the status of the post upon + !! returning to the calling routine + + send_data_infra_1d = send_data_fms(diag_field_id, field, time, is_in, mask, rmask, ie_in, weight, err_msg) + +end function send_data_infra_1d + +!> Returns true if the argument data are successfully passed to a diagnostic manager +!! with the indicated unique reference id, false otherwise. +logical function send_data_infra_2d(diag_field_id, field, is_in, ie_in, js_in, je_in, & + time, mask, rmask, weight, err_msg) + integer, intent(in) :: diag_field_id !< The diagnostic manager identifier for this field + real, dimension(:,:), intent(in) :: field !< A 2-d array of values being recorded + integer, optional, intent(in) :: is_in !< The starting i-index for the data being recorded + integer, optional, intent(in) :: ie_in !< The end i-index for the data being recorded + integer, optional, intent(in) :: js_in !< The starting j-index for the data being recorded + integer, optional, intent(in) :: je_in !< The end j-index for the data being recorded + type(time_type), optional, intent(in) :: time !< The time for the current record + logical, dimension(:,:), optional, intent(in) :: mask !< An optional 2-d logical mask + real, dimension(:,:), optional, intent(in) :: rmask !< An optional 2-d mask array + real, optional, intent(in) :: weight !< A scalar weight factor to apply to the current + !! record if there is averaging in time + character(len=*), optional, intent(out) :: err_msg !< A log indicating the status of the post upon + !! returning to the calling routine + + send_data_infra_2d = send_data_fms(diag_field_id, field, time, is_in, js_in, mask, & + rmask, ie_in, je_in, weight, err_msg) + +end function send_data_infra_2d + +!> Returns true if the argument data are successfully passed to a diagnostic manager +!! with the indicated unique reference id, false otherwise. 
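+!!
+!! A usage sketch (the diagnostic id and array names are illustrative only):
+!!   used = send_data_infra(id_temp, temp(:,:,:), time=Time)
+!! posts a 3-d temperature field for the current model time.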
+logical function send_data_infra_3d(diag_field_id, field, is_in, ie_in, js_in, je_in, ks_in, ke_in, &
+                                    time, mask, rmask, weight, err_msg)
+  integer,                          intent(in) :: diag_field_id !< The diagnostic manager identifier for this field
+  real, dimension(:,:,:),           intent(in) :: field !< A 3-d array of values being recorded
+  integer,                optional, intent(in) :: is_in !< The starting i-index for the data being recorded
+  integer,                optional, intent(in) :: ie_in !< The end i-index for the data being recorded
+  integer,                optional, intent(in) :: js_in !< The starting j-index for the data being recorded
+  integer,                optional, intent(in) :: je_in !< The end j-index for the data being recorded
+  integer,                optional, intent(in) :: ks_in !< The starting k-index for the data being recorded
+  integer,                optional, intent(in) :: ke_in !< The end k-index for the data being recorded
+  type(time_type),        optional, intent(in) :: time !< The time for the current record
+  logical, dimension(:,:,:), optional, intent(in) :: mask !< An optional 3-d logical mask
+  real, dimension(:,:,:), optional, intent(in) :: rmask !< An optional 3-d mask array
+  real,                   optional, intent(in) :: weight !< A scalar weight factor to apply to the current
+                                                        !! record if there is averaging in time
+  character(len=*),       optional, intent(out) :: err_msg !< A log indicating the status of the post upon
+                                                        !! returning to the calling routine
+
+  send_data_infra_3d = send_data_fms(diag_field_id, field, time, is_in, js_in, ks_in, mask, &
+                                     rmask, ie_in, je_in, ke_in, weight, err_msg)
+
+end function send_data_infra_3d
+
+
+#ifdef OVERLOAD_R8
+!> Returns true if the argument data are successfully passed to a diagnostic manager
+!! with the indicated unique reference id, false otherwise.
+logical function send_data_infra_2d_r8(diag_field_id, field, is_in, ie_in, js_in, je_in, &
+                                       time, mask, rmask, weight, err_msg)
+  integer,                         intent(in) :: diag_field_id !< The diagnostic manager identifier for this field
+  real(kind=8), dimension(:,:),    intent(in) :: field !< A 2-d array of values being recorded
+  integer,               optional, intent(in) :: is_in !< The starting i-index for the data being recorded
+  integer,               optional, intent(in) :: ie_in !< The end i-index for the data being recorded
+  integer,               optional, intent(in) :: js_in !< The starting j-index for the data being recorded
+  integer,               optional, intent(in) :: je_in !< The end j-index for the data being recorded
+  type(time_type),       optional, intent(in) :: time !< The time for the current record
+  logical, dimension(:,:), optional, intent(in) :: mask !< An optional 2-d logical mask
+  real, dimension(:,:),  optional, intent(in) :: rmask !< An optional 2-d mask array
+  real,                  optional, intent(in) :: weight !< A scalar weight factor to apply to the current
+                                                        !! record if there is averaging in time
+  character(len=*),      optional, intent(out) :: err_msg !< A log indicating the status of the post upon
+                                                        !! returning to the calling routine
+
+  send_data_infra_2d_r8 = send_data_fms(diag_field_id, field, time, is_in, js_in, mask, &
+                                        rmask, ie_in, je_in, weight, err_msg)
+
+end function send_data_infra_2d_r8
+
+!> Returns true if the argument data are successfully passed to a diagnostic manager
+!! with the indicated unique reference id, false otherwise.
+logical function send_data_infra_3d_r8(diag_field_id, field, is_in, ie_in, js_in, je_in, ks_in, ke_in, &
+                                       time, mask, rmask, weight, err_msg)
+  integer,                         intent(in) :: diag_field_id !< The diagnostic manager identifier for this field
+  real(kind=8), dimension(:,:,:),  intent(in) :: field !< A 3-d array of values being recorded
+  integer,               optional, intent(in) :: is_in !< The starting i-index for the data being recorded
+  integer,               optional, intent(in) :: ie_in !< The end i-index for the data being recorded
+  integer,               optional, intent(in) :: js_in !< The starting j-index for the data being recorded
+  integer,               optional, intent(in) :: je_in !< The end j-index for the data being recorded
+  integer,               optional, intent(in) :: ks_in !< The starting k-index for the data being recorded
+  integer,               optional, intent(in) :: ke_in !< The end k-index for the data being recorded
+  type(time_type),       optional, intent(in) :: time !< The time for the current record
+  logical, dimension(:,:,:), optional, intent(in) :: mask !< An optional 3-d logical mask
+  real, dimension(:,:,:), optional, intent(in) :: rmask !< An optional 3-d mask array
+  real,                  optional, intent(in) :: weight !< A scalar weight factor to apply to the current
+                                                        !! record if there is averaging in time
+  character(len=*),      optional, intent(out) :: err_msg !< A log indicating the status of the post upon
+                                                        !! returning to the calling routine
+
+  send_data_infra_3d_r8 = send_data_fms(diag_field_id, field, time, is_in, js_in, ks_in, mask, rmask, &
+                                        ie_in, je_in, ke_in, weight, err_msg)
+
+end function send_data_infra_3d_r8
+#endif
+
+!> Add a real scalar attribute to a diagnostic field
+subroutine MOM_diag_field_add_attribute_scalar_r(diag_field_id, att_name, att_value)
+  integer,          intent(in) :: diag_field_id !< The diagnostic manager identifier for this field
+  character(len=*), intent(in) :: att_name !< The name of the attribute
+  real,             intent(in) :: att_value !< A real scalar value
+
+  call FMS_diag_field_add_attribute(diag_field_id, att_name, att_value)
+
+end subroutine MOM_diag_field_add_attribute_scalar_r
+
+!> Add an integer attribute to a diagnostic field
+subroutine MOM_diag_field_add_attribute_scalar_i(diag_field_id, att_name, att_value)
+  integer,          intent(in) :: diag_field_id !< The diagnostic manager identifier for this field
+  character(len=*), intent(in) :: att_name !< The name of the attribute
+  integer,          intent(in) :: att_value !< An integer scalar value
+
+  call FMS_diag_field_add_attribute(diag_field_id, att_name, att_value)
+
+end subroutine MOM_diag_field_add_attribute_scalar_i
+
+!> Add a character string attribute to a diagnostic field
+subroutine MOM_diag_field_add_attribute_scalar_c(diag_field_id, att_name, att_value)
+  integer,          intent(in) :: diag_field_id !< The diagnostic manager identifier for this field
+  character(len=*), intent(in) :: att_name !< The name of the attribute
+  character(len=*), intent(in) :: att_value !< A character string value
+
+  call FMS_diag_field_add_attribute(diag_field_id, att_name, att_value)
+
+end subroutine MOM_diag_field_add_attribute_scalar_c
+
+!> Add a 1-d array of real attributes to a diagnostic field
+subroutine MOM_diag_field_add_attribute_r1d(diag_field_id, att_name, att_value)
+  integer,            intent(in) :: diag_field_id !< The diagnostic manager identifier for this field
+  character(len=*),   intent(in) :: att_name !< The name of the attribute
+  real, dimension(:), intent(in) :: att_value !< An array of real values
+
+  call FMS_diag_field_add_attribute(diag_field_id, att_name, att_value)
+
+end subroutine MOM_diag_field_add_attribute_r1d
+
+!> Add a 1-d array of integer attributes to a diagnostic field
+subroutine MOM_diag_field_add_attribute_i1d(diag_field_id, att_name, att_value)
+  integer,               intent(in) :: diag_field_id !< The diagnostic manager identifier for this field
+  character(len=*),      intent(in) :: att_name !< The name of the attribute
+  integer, dimension(:), intent(in) :: att_value !< An array of integer values
+
+  call FMS_diag_field_add_attribute(diag_field_id, att_name, att_value)
+
+end subroutine MOM_diag_field_add_attribute_i1d
+
+end module MOM_diag_manager_infra
diff --git a/config_src/infra/FMS1/MOM_domain_infra.F90 b/config_src/infra/FMS1/MOM_domain_infra.F90
new file mode 100644
index 0000000000..fc39777a2f
--- /dev/null
+++ b/config_src/infra/FMS1/MOM_domain_infra.F90
@@ -0,0 +1,1943 @@
+!> Describes the decomposed MOM domain and has routines for communications across PEs
+module MOM_domain_infra
+
+! This file is part of MOM6. See LICENSE.md for the license.
+
+use MOM_coms_infra,      only : PE_here, root_PE, num_PEs
+use MOM_cpu_clock_infra, only : cpu_clock_begin, cpu_clock_end
+use MOM_error_infra,     only : MOM_error=>MOM_err, NOTE, WARNING, FATAL
+
+use mpp_domains_mod, only : domain2D, domain1D
+use mpp_domains_mod, only : mpp_define_io_domain, mpp_define_domains, mpp_deallocate_domain
+use mpp_domains_mod, only : mpp_get_domain_components, mpp_get_domain_extents
+use mpp_domains_mod, only : mpp_get_compute_domain, mpp_get_data_domain, mpp_get_global_domain
+use mpp_domains_mod, only : mpp_get_boundary, mpp_update_domains
+use mpp_domains_mod, only : mpp_start_update_domains, mpp_complete_update_domains
+use mpp_domains_mod, only : mpp_create_group_update, mpp_do_group_update
+use mpp_domains_mod, only : mpp_reset_group_update_field, mpp_group_update_initialized
+use mpp_domains_mod, only : mpp_start_group_update, mpp_complete_group_update
+use mpp_domains_mod, only : mpp_compute_block_extent
+use mpp_domains_mod, only : mpp_broadcast_domain, mpp_redistribute, mpp_global_field
+use mpp_domains_mod, only : AGRID, BGRID_NE, CGRID_NE, SCALAR_PAIR, BITWISE_EXACT_SUM
+use mpp_domains_mod, only : CYCLIC_GLOBAL_DOMAIN, FOLD_NORTH_EDGE
+use mpp_domains_mod, only : To_East => WUPDATE, To_West => EUPDATE, Omit_Corners => EDGEUPDATE
+use mpp_domains_mod, only : To_North => SUPDATE, To_South => NUPDATE
+use mpp_domains_mod, only : CENTER, CORNER, NORTH_FACE => NORTH, EAST_FACE => EAST
+use fms_io_mod,       only : file_exist, parse_mask_table
+use fms_affinity_mod, only : fms_affinity_init, fms_affinity_set, fms_affinity_get
+
+! This subroutine is not in MOM6/src but may be required by legacy drivers
+use mpp_domains_mod, only : global_field_sum => mpp_global_sum
+
+! The `group_pass_type` fields are never accessed, so we keep it as an FMS type
+use mpp_domains_mod, only : group_pass_type => mpp_group_update_type
+
+implicit none ; private
+
+! These types are inherited from mpp, but are treated as opaque here.
+public :: domain2D, domain1D, group_pass_type
+! These interfaces are actually implemented or have explicit interfaces in this file.
+public :: create_MOM_domain, clone_MOM_domain, get_domain_components, get_domain_extent +public :: deallocate_MOM_domain, get_global_shape, compute_block_extent +public :: pass_var, pass_vector, fill_symmetric_edges, rescale_comp_data +public :: pass_var_start, pass_var_complete, pass_vector_start, pass_vector_complete +public :: create_group_pass, do_group_pass, start_group_pass, complete_group_pass +public :: redistribute_array, broadcast_domain, global_field +public :: get_simple_array_i_ind, get_simple_array_j_ind +public :: MOM_thread_affinity_set, set_MOM_thread_affinity +! These are encoding constant parmeters. +public :: To_East, To_West, To_North, To_South, To_All, Omit_Corners +public :: AGRID, BGRID_NE, CGRID_NE, SCALAR_PAIR +public :: CORNER, CENTER, NORTH_FACE, EAST_FACE +! These are no longer used by MOM6 because the reproducing sum works so well, but they are +! still referenced by some of the non-GFDL couplers. +public :: global_field_sum, BITWISE_EXACT_SUM + +!> Do a halo update on an array +interface pass_var + module procedure pass_var_3d, pass_var_2d +end interface pass_var + +!> Do a halo update on a pair of arrays representing the two components of a vector +interface pass_vector + module procedure pass_vector_3d, pass_vector_2d +end interface pass_vector + +!> Initiate a non-blocking halo update on an array +interface pass_var_start + module procedure pass_var_start_3d, pass_var_start_2d +end interface pass_var_start + +!> Complete a non-blocking halo update on an array +interface pass_var_complete + module procedure pass_var_complete_3d, pass_var_complete_2d +end interface pass_var_complete + +!> Initiate a halo update on a pair of arrays representing the two components of a vector +interface pass_vector_start + module procedure pass_vector_start_3d, pass_vector_start_2d +end interface pass_vector_start + +!> Complete a halo update on a pair of arrays representing the two components of a vector +interface pass_vector_complete + module procedure pass_vector_complete_3d, pass_vector_complete_2d +end interface pass_vector_complete + +!> Set up a group of halo updates +interface create_group_pass + module procedure create_var_group_pass_2d + module procedure create_var_group_pass_3d + module procedure create_vector_group_pass_2d + module procedure create_vector_group_pass_3d +end interface create_group_pass + +!> Do a set of halo updates that fill in the values at the duplicated edges +!! of a staggered symmetric memory domain +interface fill_symmetric_edges + module procedure fill_vector_symmetric_edges_2d !, fill_vector_symmetric_edges_3d +! 
module procedure fill_scalar_symmetric_edges_2d, fill_scalar_symmetric_edges_3d +end interface fill_symmetric_edges + +!> Rescale the values of an array in its computational domain by a constant factor +interface rescale_comp_data + module procedure rescale_comp_data_4d, rescale_comp_data_3d, rescale_comp_data_2d +end interface rescale_comp_data + +!> Pass an array from one MOM domain to another +interface redistribute_array + module procedure redistribute_array_3d, redistribute_array_2d +end interface redistribute_array + +!> Copy one MOM_domain_type into another +interface clone_MOM_domain + module procedure clone_MD_to_MD, clone_MD_to_d2D +end interface clone_MOM_domain + +!> Extract the 1-d domain components from a MOM_domain or domain2d +interface get_domain_components + module procedure get_domain_components_MD, get_domain_components_d2D +end interface get_domain_components + +!> Returns the index ranges that have been stored in a MOM_domain_type +interface get_domain_extent + module procedure get_domain_extent_MD, get_domain_extent_d2D +end interface get_domain_extent + + +!> The MOM_domain_type contains information about the domain decomposition. +type, public :: MOM_domain_type + character(len=64) :: name !< The name of this domain + type(domain2D), pointer :: mpp_domain => NULL() !< The FMS domain with halos + !! on this processor, centered at h points. + type(domain2D), pointer :: mpp_domain_d2 => NULL() !< A coarse FMS domain with halos + !! on this processor, centered at h points. + integer :: niglobal !< The total horizontal i-domain size. + integer :: njglobal !< The total horizontal j-domain size. + integer :: nihalo !< The i-halo size in memory. + integer :: njhalo !< The j-halo size in memory. + logical :: symmetric !< True if symmetric memory is used with this domain. + logical :: nonblocking_updates !< If true, non-blocking halo updates are + !! allowed. The default is .false. (for now). + logical :: thin_halo_updates !< If true, optional arguments may be used to + !! specify the width of the halos that are + !! updated with each call. + integer :: layout(2) !< This domain's processor layout. This is + !! saved to enable the construction of related + !! new domains with different resolutions or + !! other properties. + integer :: io_layout(2) !< The IO-layout used with this domain. + integer :: X_FLAGS !< Flag that specifies the properties of the + !! domain in the i-direction in a define_domain call. + integer :: Y_FLAGS !< Flag that specifies the properties of the + !! domain in the j-direction in a define_domain call. + logical, pointer :: maskmap(:,:) => NULL() !< A pointer to an array indicating + !! which logical processors are actually used for + !! the ocean code. The other logical processors + !! would be contain only land points and are not + !! assigned to actual processors. This need not be + !! assigned if all logical processors are used. +end type MOM_domain_type + +integer, parameter :: To_All = To_East + To_West + To_North + To_South !< A flag for passing in all directions + +contains + +!> pass_var_3d does a halo update for a three-dimensional array. +subroutine pass_var_3d(array, MOM_dom, sideflag, complete, position, halo, & + clock) + real, dimension(:,:,:), intent(inout) :: array !< The array which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. 
+ integer, optional, intent(in) :: sideflag !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH. For example, TO_EAST sends the data to the processor to the east, + !! sothe halos on the western side are filled. TO_ALL is the default if sideflag is omitted. + logical, optional, intent(in) :: complete !< An optional argument indicating whether the + !! halo updates should be completed before + !! progress resumes. Omitting complete is the + !! same as setting complete to .true. + integer, optional, intent(in) :: position !< An optional argument indicating the position. + !! This is CENTER by default and is often CORNER, + !! but could also be EAST_FACE or NORTH_FACE. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + + integer :: dirflag + logical :: block_til_complete + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + dirflag = To_All ! 60 + if (present(sideflag)) then ; if (sideflag > 0) dirflag = sideflag ; endif + block_til_complete = .true. + if (present(complete)) block_til_complete = complete + + if (present(halo) .and. MOM_dom%thin_halo_updates) then + call mpp_update_domains(array, MOM_dom%mpp_domain, flags=dirflag, & + complete=block_til_complete, position=position, & + whalo=halo, ehalo=halo, shalo=halo, nhalo=halo) + else + call mpp_update_domains(array, MOM_dom%mpp_domain, flags=dirflag, & + complete=block_til_complete, position=position) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine pass_var_3d + +!> pass_var_2d does a halo update for a two-dimensional array. +subroutine pass_var_2d(array, MOM_dom, sideflag, complete, position, halo, inner_halo, clock) + real, dimension(:,:), intent(inout) :: array !< The array which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be sent. + integer, optional, intent(in) :: sideflag !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH. For example, TO_EAST sends the data to the processor to the east, + !! so the halos on the western side are filled. TO_ALL is the default if sideflag is omitted. + logical, optional, intent(in) :: complete !< An optional argument indicating whether the + !! halo updates should be completed before + !! progress resumes. Omitting complete is the + !! same as setting complete to .true. + integer, optional, intent(in) :: position !< An optional argument indicating the position. + !! This is CENTER by default and is often CORNER, + !! but could also be EAST_FACE or NORTH_FACE. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full halo + !! by default. + integer, optional, intent(in) :: inner_halo !< The size of an inner halo to avoid updating, + !! or 0 to avoid updating symmetric memory + !! computational domain points. Setting this >=0 + !! also enforces that complete=.true. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + + ! 
Local variables
+  real, allocatable, dimension(:,:) :: tmp
+  integer :: pos, i_halo, j_halo
+  integer :: isc, iec, jsc, jec, isd, ied, jsd, jed, IscB, IecB, JscB, JecB
+  integer :: inner, i, j, isfw, iefw, isfe, iefe, jsfs, jefs, jsfn, jefn
+  integer :: dirflag
+  logical :: block_til_complete
+
+  if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif
+
+  dirflag = To_All ! 60
+  if (present(sideflag)) then ; if (sideflag > 0) dirflag = sideflag ; endif
+  block_til_complete = .true. ; if (present(complete)) block_til_complete = complete
+  pos = CENTER ; if (present(position)) pos = position
+
+  if (present(inner_halo)) then ; if (inner_halo >= 0) then
+    ! Store the original values.
+    allocate(tmp(size(array,1), size(array,2)))
+    tmp(:,:) = array(:,:)
+    block_til_complete = .true.
+  endif ; endif
+
+  if (present(halo) .and. MOM_dom%thin_halo_updates) then
+    call mpp_update_domains(array, MOM_dom%mpp_domain, flags=dirflag, &
+                            complete=block_til_complete, position=position, &
+                            whalo=halo, ehalo=halo, shalo=halo, nhalo=halo)
+  else
+    call mpp_update_domains(array, MOM_dom%mpp_domain, flags=dirflag, &
+                            complete=block_til_complete, position=position)
+  endif
+
+  if (present(inner_halo)) then ; if (inner_halo >= 0) then
+    call mpp_get_compute_domain(MOM_dom%mpp_domain, isc, iec, jsc, jec)
+    call mpp_get_data_domain(MOM_dom%mpp_domain, isd, ied, jsd, jed)
+    ! Convert to local indices for arrays starting at 1.
+    isc = isc - (isd-1) ; iec = iec - (isd-1) ; ied = ied - (isd-1) ; isd = 1
+    jsc = jsc - (jsd-1) ; jec = jec - (jsd-1) ; jed = jed - (jsd-1) ; jsd = 1
+    i_halo = min(inner_halo, isc-1) ; j_halo = min(inner_halo, jsc-1)
+
+    ! Figure out the array index extents of the eastern, western, northern and southern regions to copy.
+    if (pos == CENTER) then
+      if (size(array,1) == ied) then
+        isfw = isc - i_halo ; iefw = isc ; isfe = iec ; iefe = iec + i_halo
+      else ; call MOM_error(FATAL, "pass_var_2d: wrong i-size for CENTER array.") ; endif
+      if (size(array,2) == jed) then
+        jsfs = jsc - j_halo ; jefs = jsc ; jsfn = jec ; jefn = jec + j_halo
+      else ; call MOM_error(FATAL, "pass_var_2d: wrong j-size for CENTER array.") ; endif
+    elseif (pos == CORNER) then
+      if (size(array,1) == ied) then
+        isfw = max(isc - (i_halo+1), 1) ; iefw = isc ; isfe = iec ; iefe = iec + i_halo
+      elseif (size(array,1) == ied+1) then
+        isfw = isc - i_halo ; iefw = isc+1 ; isfe = iec+1 ; iefe = min(iec + 1 + i_halo, ied+1)
+      else ; call MOM_error(FATAL, "pass_var_2d: wrong i-size for CORNER array.") ; endif
+      if (size(array,2) == jed) then
+        jsfs = max(jsc - (j_halo+1), 1) ; jefs = jsc ; jsfn = jec ; jefn = jec + j_halo
+      elseif (size(array,2) == jed+1) then
+        jsfs = jsc - j_halo ; jefs = jsc+1 ; jsfn = jec+1 ; jefn = min(jec + 1 + j_halo, jed+1)
+      else ; call MOM_error(FATAL, "pass_var_2d: wrong j-size for CORNER array.") ; endif
+    elseif (pos == NORTH_FACE) then
+      if (size(array,1) == ied) then
+        isfw = isc - i_halo ; iefw = isc ; isfe = iec ; iefe = iec + i_halo
+      else ; call MOM_error(FATAL, "pass_var_2d: wrong i-size for NORTH_FACE array.") ; endif
+      if (size(array,2) == jed) then
+        jsfs = max(jsc - (j_halo+1), 1) ; jefs = jsc ; jsfn = jec ; jefn = jec + j_halo
+      elseif (size(array,2) == jed+1) then
+        jsfs = jsc - j_halo ; jefs = jsc+1 ; jsfn = jec+1 ; jefn = min(jec + 1 + j_halo, jed+1)
+      else ; call MOM_error(FATAL, "pass_var_2d: wrong j-size for NORTH_FACE array.") ; endif
+    elseif (pos == EAST_FACE) then
+      if (size(array,1) == ied) then
+        isfw = max(isc - (i_halo+1), 1) ; iefw = isc ; isfe = iec ; iefe = iec + i_halo
+      elseif (size(array,1) == ied+1) then
+        isfw = isc - i_halo ; iefw = isc+1 ; isfe = iec+1 ; iefe = min(iec + 1 + i_halo, ied+1)
+      else ; call MOM_error(FATAL, "pass_var_2d: wrong i-size for EAST_FACE array.") ; endif
+      if (size(array,2) == jed) then
+        jsfs = jsc - j_halo ; jefs = jsc ; jsfn = jec ; jefn = jec + j_halo
+      else ; call MOM_error(FATAL, "pass_var_2d: wrong j-size for EAST_FACE array.") ; endif
+    else
+      call MOM_error(FATAL, "pass_var_2d: Unrecognized position")
+    endif
+
+    ! Copy back the stored inner halo points
+    do j=jsfs,jefn ; do i=isfw,iefw ; array(i,j) = tmp(i,j) ; enddo ; enddo
+    do j=jsfs,jefn ; do i=isfe,iefe ; array(i,j) = tmp(i,j) ; enddo ; enddo
+    do j=jsfs,jefs ; do i=isfw,iefe ; array(i,j) = tmp(i,j) ; enddo ; enddo
+    do j=jsfn,jefn ; do i=isfw,iefe ; array(i,j) = tmp(i,j) ; enddo ; enddo
+
+    deallocate(tmp)
+  endif ; endif
+
+  if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif
+
+end subroutine pass_var_2d
+
+!> pass_var_start_2d starts a halo update for a two-dimensional array.
+function pass_var_start_2d(array, MOM_dom, sideflag, position, complete, halo, &
+                           clock)
+  real, dimension(:,:),  intent(inout) :: array    !< The array which is having its halos points
+                                                   !! exchanged.
+  type(MOM_domain_type), intent(inout) :: MOM_dom  !< The MOM_domain_type containing the mpp_domain
+                                                   !! needed to determine where data should be
+                                                   !! sent.
+  integer, optional,     intent(in)    :: sideflag !< An optional integer indicating which
+      !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST,
+      !! TO_NORTH, and TO_SOUTH. For example, TO_EAST sends the data to the processor to the east,
+      !! so the halos on the western side are filled. TO_ALL is the default if sideflag is omitted.
+  integer, optional,     intent(in)    :: position !< An optional argument indicating the position.
+                                                   !! This is CENTER by default and is often CORNER,
+                                                   !! but could also be EAST_FACE or NORTH_FACE.
+  logical, optional,     intent(in)    :: complete !< An optional argument indicating whether the
+                                                   !! halo updates should be completed before
+                                                   !! progress resumes. Omitting complete is the
+                                                   !! same as setting complete to .true.
+  integer, optional,     intent(in)    :: halo     !< The size of the halo to update - the full
+                                                   !! halo by default.
+  integer, optional,     intent(in)    :: clock    !< The handle for a cpu time clock that should be
+                                                   !! started then stopped to time this routine.
+  integer :: pass_var_start_2d !< The integer index for this update.
+
+  integer :: dirflag
+
+  if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif
+
+  dirflag = To_All ! 60
+  if (present(sideflag)) then ; if (sideflag > 0) dirflag = sideflag ; endif
+
+  if (present(halo) .and. MOM_dom%thin_halo_updates) then
+    pass_var_start_2d = mpp_start_update_domains(array, MOM_dom%mpp_domain, &
+                                                 flags=dirflag, position=position, &
+                                                 whalo=halo, ehalo=halo, shalo=halo, nhalo=halo)
+  else
+    pass_var_start_2d = mpp_start_update_domains(array, MOM_dom%mpp_domain, &
+                                                 flags=dirflag, position=position)
+  endif
+
+  if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif
+
+end function pass_var_start_2d
+
+!> pass_var_start_3d starts a halo update for a three-dimensional array.
+function pass_var_start_3d(array, MOM_dom, sideflag, position, complete, halo, &
+                           clock)
+  real, dimension(:,:,:), intent(inout) :: array   !< The array which is having its halos points
+                                                   !! exchanged.
+  type(MOM_domain_type),  intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain
+                                                   !! needed to determine where data should be
+                                                   !! sent.
+ integer, optional, intent(in) :: sideflag !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH. For example, TO_EAST sends the data to the processor to the east, + !! so the halos on the western side are filled. TO_ALL is the default if sideflag is omitted. + integer, optional, intent(in) :: position !< An optional argument indicating the position. + !! This is CENTER by default and is often CORNER, + !! but could also be EAST_FACE or NORTH_FACE. + logical, optional, intent(in) :: complete !< An optional argument indicating whether the + !! halo updates should be completed before + !! progress resumes. Omitting complete is the + !! same as setting complete to .true. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + integer :: pass_var_start_3d !< The integer index for this update. + + integer :: dirflag + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + dirflag = To_All ! 60 + if (present(sideflag)) then ; if (sideflag > 0) dirflag = sideflag ; endif + + if (present(halo) .and. MOM_dom%thin_halo_updates) then + pass_var_start_3d = mpp_start_update_domains(array, MOM_dom%mpp_domain, & + flags=dirflag, position=position, & + whalo=halo, ehalo=halo, shalo=halo, nhalo=halo) + else + pass_var_start_3d = mpp_start_update_domains(array, MOM_dom%mpp_domain, & + flags=dirflag, position=position) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end function pass_var_start_3d + +!> pass_var_complete_2d completes a halo update for a two-dimensional array. +subroutine pass_var_complete_2d(id_update, array, MOM_dom, sideflag, position, halo, & + clock) + integer, intent(in) :: id_update !< The integer id of this update which has + !! been returned from a previous call to + !! pass_var_start. + real, dimension(:,:), intent(inout) :: array !< The array which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: sideflag !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH. For example, TO_EAST sends the data to the processor to the east, + !! so the halos on the western side are filled. TO_ALL is the default if sideflag is omitted. + integer, optional, intent(in) :: position !< An optional argument indicating the position. + !! This is CENTER by default and is often CORNER, + !! but could also be EAST_FACE or NORTH_FACE. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + + integer :: dirflag + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + dirflag = To_All ! 60 + if (present(sideflag)) then ; if (sideflag > 0) dirflag = sideflag ; endif + + if (present(halo) .and. 
MOM_dom%thin_halo_updates) then + call mpp_complete_update_domains(id_update, array, MOM_dom%mpp_domain, & + flags=dirflag, position=position, & + whalo=halo, ehalo=halo, shalo=halo, nhalo=halo) + else + call mpp_complete_update_domains(id_update, array, MOM_dom%mpp_domain, & + flags=dirflag, position=position) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine pass_var_complete_2d + +!> pass_var_complete_3d completes a halo update for a three-dimensional array. +subroutine pass_var_complete_3d(id_update, array, MOM_dom, sideflag, position, halo, & + clock) + integer, intent(in) :: id_update !< The integer id of this update which has + !! been returned from a previous call to + !! pass_var_start. + real, dimension(:,:,:), intent(inout) :: array !< The array which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: sideflag !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH. For example, TO_EAST sends the data to the processor to the east, + !! so the halos on the western side are filled. TO_ALL is the default if sideflag is omitted. + integer, optional, intent(in) :: position !< An optional argument indicating the position. + !! This is CENTER by default and is often CORNER, + !! but could also be EAST_FACE or NORTH_FACE. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + + integer :: dirflag + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + dirflag = To_All ! 60 + if (present(sideflag)) then ; if (sideflag > 0) dirflag = sideflag ; endif + + if (present(halo) .and. MOM_dom%thin_halo_updates) then + call mpp_complete_update_domains(id_update, array, MOM_dom%mpp_domain, & + flags=dirflag, position=position, & + whalo=halo, ehalo=halo, shalo=halo, nhalo=halo) + else + call mpp_complete_update_domains(id_update, array, MOM_dom%mpp_domain, & + flags=dirflag, position=position) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine pass_var_complete_3d + +!> pass_vector_2d does a halo update for a pair of two-dimensional arrays +!! representing the components of a two-dimensional horizontal vector. +subroutine pass_vector_2d(u_cmpt, v_cmpt, MOM_dom, direction, stagger, complete, halo, & + clock) + real, dimension(:,:), intent(inout) :: u_cmpt !< The nominal zonal (u) component of the vector + !! pair which is having its halos points + !! exchanged. + real, dimension(:,:), intent(inout) :: v_cmpt !< The nominal meridional (v) component of the + !! vector pair which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: direction !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH, possibly plus SCALAR_PAIR if these are paired non-directional + !! 
scalars discretized at the typical vector component locations. For example, TO_EAST sends
+ !! the data to the processor to the east, so the halos on the western side are filled. TO_ALL
+ !! is the default if omitted.
+ integer, optional, intent(in) :: stagger !< An optional flag, which may be one of A_GRID,
+ !! BGRID_NE, or CGRID_NE, indicating where the two components of the vector are
+ !! discretized. Omitting stagger is the same as setting it to CGRID_NE.
+ logical, optional, intent(in) :: complete !< An optional argument indicating whether the
+ !! halo updates should be completed before progress resumes.
+ !! Omitting complete is the same as setting complete to .true.
+ integer, optional, intent(in) :: halo !< The size of the halo to update - the full
+ !! halo by default.
+ integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be
+ !! started then stopped to time this routine.
+
+ ! Local variables
+ integer :: stagger_local
+ integer :: dirflag
+ logical :: block_til_complete
+
+ if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif
+
+ stagger_local = CGRID_NE ! Default value for type of grid
+ if (present(stagger)) stagger_local = stagger
+
+ dirflag = To_All ! 60
+ if (present(direction)) then ; if (direction > 0) dirflag = direction ; endif
+ block_til_complete = .true.
+ if (present(complete)) block_til_complete = complete
+
+ if (present(halo) .and. MOM_dom%thin_halo_updates) then
+ call mpp_update_domains(u_cmpt, v_cmpt, MOM_dom%mpp_domain, flags=dirflag, &
+ gridtype=stagger_local, complete = block_til_complete, &
+ whalo=halo, ehalo=halo, shalo=halo, nhalo=halo)
+ else
+ call mpp_update_domains(u_cmpt, v_cmpt, MOM_dom%mpp_domain, flags=dirflag, &
+ gridtype=stagger_local, complete = block_til_complete)
+ endif
+
+ if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif
+
+end subroutine pass_vector_2d
+
+!> fill_vector_symmetric_edges_2d does an unusual set of halo updates that only
+!! fill in the values at the edge of a pair of symmetric memory two-dimensional
+!! arrays representing the components of a two-dimensional horizontal vector.
+!! If symmetric memory is not being used, this subroutine does nothing except to
+!! possibly turn optional cpu clocks on or off.
+subroutine fill_vector_symmetric_edges_2d(u_cmpt, v_cmpt, MOM_dom, stagger, scalar, &
+ clock)
+ real, dimension(:,:), intent(inout) :: u_cmpt !< The nominal zonal (u) component of the vector
+ !! pair which is having its halos points
+ !! exchanged.
+ real, dimension(:,:), intent(inout) :: v_cmpt !< The nominal meridional (v) component of the
+ !! vector pair which is having its halos points
+ !! exchanged.
+ type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain
+ !! needed to determine where data should be
+ !! sent.
+ integer, optional, intent(in) :: stagger !< An optional flag, which may be one of A_GRID,
+ !! BGRID_NE, or CGRID_NE, indicating where the two components of the vector are
+ !! discretized. Omitting stagger is the same as setting it to CGRID_NE.
+ logical, optional, intent(in) :: scalar !< An optional argument indicating whether the
+ !! two arrays are a pair of scalars at vector component points rather than true vector
+ !! components; if true, SCALAR_PAIR is added to the direction flags.
+ integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be
+ !! started then stopped to time this routine.
+
+ !
Local variables + integer :: stagger_local + integer :: dirflag + integer :: i, j, isc, iec, jsc, jec, isd, ied, jsd, jed, IscB, IecB, JscB, JecB + real, allocatable, dimension(:) :: sbuff_x, sbuff_y, wbuff_x, wbuff_y + logical :: block_til_complete + + if (.not. MOM_dom%symmetric) then + return + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + stagger_local = CGRID_NE ! Default value for type of grid + if (present(stagger)) stagger_local = stagger + + if (.not.(stagger_local == CGRID_NE .or. stagger_local == BGRID_NE)) return + + call mpp_get_compute_domain(MOM_dom%mpp_domain, isc, iec, jsc, jec) + call mpp_get_data_domain(MOM_dom%mpp_domain, isd, ied, jsd, jed) + + ! Adjust isc, etc., to account for the fact that the input arrays indices all + ! start at 1 (and are effectively on a SW grid!). + isc = isc - (isd-1) ; iec = iec - (isd-1) + jsc = jsc - (jsd-1) ; jec = jec - (jsd-1) + IscB = isc ; IecB = iec+1 ; JscB = jsc ; JecB = jec+1 + + dirflag = To_All ! 60 + if (present(scalar)) then ; if (scalar) dirflag = To_All+SCALAR_PAIR ; endif + + if (stagger_local == CGRID_NE) then + allocate(wbuff_x(jsc:jec)) ; allocate(sbuff_y(isc:iec)) + wbuff_x(:) = 0.0 ; sbuff_y(:) = 0.0 + call mpp_get_boundary(u_cmpt, v_cmpt, MOM_dom%mpp_domain, flags=dirflag, & + wbufferx=wbuff_x, sbuffery=sbuff_y, & + gridtype=CGRID_NE) + do i=isc,iec + v_cmpt(i,JscB) = sbuff_y(i) + enddo + do j=jsc,jec + u_cmpt(IscB,j) = wbuff_x(j) + enddo + deallocate(wbuff_x) ; deallocate(sbuff_y) + elseif (stagger_local == BGRID_NE) then + allocate(wbuff_x(JscB:JecB)) ; allocate(sbuff_x(IscB:IecB)) + allocate(wbuff_y(JscB:JecB)) ; allocate(sbuff_y(IscB:IecB)) + wbuff_x(:) = 0.0 ; wbuff_y(:) = 0.0 ; sbuff_x(:) = 0.0 ; sbuff_y(:) = 0.0 + call mpp_get_boundary(u_cmpt, v_cmpt, MOM_dom%mpp_domain, flags=dirflag, & + wbufferx=wbuff_x, sbufferx=sbuff_x, & + wbuffery=wbuff_y, sbuffery=sbuff_y, & + gridtype=BGRID_NE) + do I=IscB,IecB + u_cmpt(I,JscB) = sbuff_x(I) ; v_cmpt(I,JscB) = sbuff_y(I) + enddo + do J=JscB,JecB + u_cmpt(IscB,J) = wbuff_x(J) ; v_cmpt(IscB,J) = wbuff_y(J) + enddo + deallocate(wbuff_x) ; deallocate(sbuff_x) + deallocate(wbuff_y) ; deallocate(sbuff_y) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine fill_vector_symmetric_edges_2d + +!> pass_vector_3d does a halo update for a pair of three-dimensional arrays +!! representing the components of a three-dimensional horizontal vector. +subroutine pass_vector_3d(u_cmpt, v_cmpt, MOM_dom, direction, stagger, complete, halo, & + clock) + real, dimension(:,:,:), intent(inout) :: u_cmpt !< The nominal zonal (u) component of the vector + !! pair which is having its halos points + !! exchanged. + real, dimension(:,:,:), intent(inout) :: v_cmpt !< The nominal meridional (v) component of the + !! vector pair which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: direction !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH, possibly plus SCALAR_PAIR if these are paired non-directional + !! scalars discretized at the typical vector component locations. For example, TO_EAST sends + !! the data to the processor to the east, so the halos on the western side are filled. TO_ALL + !! is the default if omitted. 
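+ ! An illustrative usage sketch (comments only, not compiled here); u, v, Khu, Khv and
+ ! G%Domain are hypothetical names, and the generic interface pass_vector is assumed to
+ ! be declared earlier in this module.
+ !   call pass_vector(u, v, G%Domain, direction=To_All, stagger=CGRID_NE)
+ !   call pass_vector(Khu, Khv, G%Domain, direction=To_All+SCALAR_PAIR, stagger=CGRID_NE)
+ ! The SCALAR_PAIR form exchanges paired scalars at velocity points without the sign
+ ! changes that true vector components undergo across a tripolar fold.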
+ integer, optional, intent(in) :: stagger !< An optional flag, which may be one of A_GRID, + !! BGRID_NE, or CGRID_NE, indicating where the two components of the vector are + !! discretized. Omitting stagger is the same as setting it to CGRID_NE. + logical, optional, intent(in) :: complete !< An optional argument indicating whether the + !! halo updates should be completed before progress resumes. + !! Omitting complete is the same as setting complete to .true. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + + ! Local variables + integer :: stagger_local + integer :: dirflag + logical :: block_til_complete + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + stagger_local = CGRID_NE ! Default value for type of grid + if (present(stagger)) stagger_local = stagger + + dirflag = To_All ! 60 + if (present(direction)) then ; if (direction > 0) dirflag = direction ; endif + block_til_complete = .true. + if (present(complete)) block_til_complete = complete + + if (present(halo) .and. MOM_dom%thin_halo_updates) then + call mpp_update_domains(u_cmpt, v_cmpt, MOM_dom%mpp_domain, flags=dirflag, & + gridtype=stagger_local, complete = block_til_complete, & + whalo=halo, ehalo=halo, shalo=halo, nhalo=halo) + else + call mpp_update_domains(u_cmpt, v_cmpt, MOM_dom%mpp_domain, flags=dirflag, & + gridtype=stagger_local, complete = block_til_complete) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine pass_vector_3d + +!> pass_vector_start_2d starts a halo update for a pair of two-dimensional arrays +!! representing the components of a two-dimensional horizontal vector. +function pass_vector_start_2d(u_cmpt, v_cmpt, MOM_dom, direction, stagger, complete, halo, & + clock) + real, dimension(:,:), intent(inout) :: u_cmpt !< The nominal zonal (u) component of the vector + !! pair which is having its halos points + !! exchanged. + real, dimension(:,:), intent(inout) :: v_cmpt !< The nominal meridional (v) component of the + !! vector pair which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: direction !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH, possibly plus SCALAR_PAIR if these are paired non-directional + !! scalars discretized at the typical vector component locations. For example, TO_EAST sends + !! the data to the processor to the east, so the halos on the western side are filled. TO_ALL + !! is the default if omitted. + integer, optional, intent(in) :: stagger !< An optional flag, which may be one of A_GRID, + !! BGRID_NE, or CGRID_NE, indicating where the two components of the vector are + !! discretized. Omitting stagger is the same as setting it to CGRID_NE. + logical, optional, intent(in) :: complete !< An optional argument indicating whether the + !! halo updates should be completed before progress resumes. + !! Omitting complete is the same as setting complete to .true. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. 
+ integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + integer :: pass_vector_start_2d !< The integer index for this + !! update. + + ! Local variables + integer :: stagger_local + integer :: dirflag + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + stagger_local = CGRID_NE ! Default value for type of grid + if (present(stagger)) stagger_local = stagger + + dirflag = To_All ! 60 + if (present(direction)) then ; if (direction > 0) dirflag = direction ; endif + + if (present(halo) .and. MOM_dom%thin_halo_updates) then + pass_vector_start_2d = mpp_start_update_domains(u_cmpt, v_cmpt, & + MOM_dom%mpp_domain, flags=dirflag, gridtype=stagger_local, & + whalo=halo, ehalo=halo, shalo=halo, nhalo=halo) + else + pass_vector_start_2d = mpp_start_update_domains(u_cmpt, v_cmpt, & + MOM_dom%mpp_domain, flags=dirflag, gridtype=stagger_local) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end function pass_vector_start_2d + +!> pass_vector_start_3d starts a halo update for a pair of three-dimensional arrays +!! representing the components of a three-dimensional horizontal vector. +function pass_vector_start_3d(u_cmpt, v_cmpt, MOM_dom, direction, stagger, complete, halo, & + clock) + real, dimension(:,:,:), intent(inout) :: u_cmpt !< The nominal zonal (u) component of the vector + !! pair which is having its halos points + !! exchanged. + real, dimension(:,:,:), intent(inout) :: v_cmpt !< The nominal meridional (v) component of the + !! vector pair which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: direction !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH, possibly plus SCALAR_PAIR if these are paired non-directional + !! scalars discretized at the typical vector component locations. For example, TO_EAST sends + !! the data to the processor to the east, so the halos on the western side are filled. TO_ALL + !! is the default if omitted. + integer, optional, intent(in) :: stagger !< An optional flag, which may be one of A_GRID, + !! BGRID_NE, or CGRID_NE, indicating where the two components of the vector are + !! discretized. Omitting stagger is the same as setting it to CGRID_NE. + logical, optional, intent(in) :: complete !< An optional argument indicating whether the + !! halo updates should be completed before progress resumes. + !! Omitting complete is the same as setting complete to .true. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + integer :: pass_vector_start_3d !< The integer index for this + !! update. + ! Local variables + integer :: stagger_local + integer :: dirflag + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + stagger_local = CGRID_NE ! Default value for type of grid + if (present(stagger)) stagger_local = stagger + + dirflag = To_All ! 60 + if (present(direction)) then ; if (direction > 0) dirflag = direction ; endif + + if (present(halo) .and. 
MOM_dom%thin_halo_updates) then + pass_vector_start_3d = mpp_start_update_domains(u_cmpt, v_cmpt, & + MOM_dom%mpp_domain, flags=dirflag, gridtype=stagger_local, & + whalo=halo, ehalo=halo, shalo=halo, nhalo=halo) + else + pass_vector_start_3d = mpp_start_update_domains(u_cmpt, v_cmpt, & + MOM_dom%mpp_domain, flags=dirflag, gridtype=stagger_local) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end function pass_vector_start_3d + +!> pass_vector_complete_2d completes a halo update for a pair of two-dimensional arrays +!! representing the components of a two-dimensional horizontal vector. +subroutine pass_vector_complete_2d(id_update, u_cmpt, v_cmpt, MOM_dom, direction, stagger, halo, & + clock) + integer, intent(in) :: id_update !< The integer id of this update which has been + !! returned from a previous call to + !! pass_var_start. + real, dimension(:,:), intent(inout) :: u_cmpt !< The nominal zonal (u) component of the vector + !! pair which is having its halos points + !! exchanged. + real, dimension(:,:), intent(inout) :: v_cmpt !< The nominal meridional (v) component of the + !! vector pair which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: direction !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH, possibly plus SCALAR_PAIR if these are paired non-directional + !! scalars discretized at the typical vector component locations. For example, TO_EAST sends + !! the data to the processor to the east, so the halos on the western side are filled. TO_ALL + !! is the default if omitted. + integer, optional, intent(in) :: stagger !< An optional flag, which may be one of A_GRID, + !! BGRID_NE, or CGRID_NE, indicating where the two components of the vector are + !! discretized. Omitting stagger is the same as setting it to CGRID_NE. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + ! Local variables + integer :: stagger_local + integer :: dirflag + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + stagger_local = CGRID_NE ! Default value for type of grid + if (present(stagger)) stagger_local = stagger + + dirflag = To_All ! 60 + if (present(direction)) then ; if (direction > 0) dirflag = direction ; endif + + if (present(halo) .and. MOM_dom%thin_halo_updates) then + call mpp_complete_update_domains(id_update, u_cmpt, v_cmpt, & + MOM_dom%mpp_domain, flags=dirflag, gridtype=stagger_local, & + whalo=halo, ehalo=halo, shalo=halo, nhalo=halo) + else + call mpp_complete_update_domains(id_update, u_cmpt, v_cmpt, & + MOM_dom%mpp_domain, flags=dirflag, gridtype=stagger_local) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine pass_vector_complete_2d + +!> pass_vector_complete_3d completes a halo update for a pair of three-dimensional +!! arrays representing the components of a three-dimensional horizontal vector. 
+subroutine pass_vector_complete_3d(id_update, u_cmpt, v_cmpt, MOM_dom, direction, stagger, halo, & + clock) + integer, intent(in) :: id_update !< The integer id of this update which has been + !! returned from a previous call to + !! pass_var_start. + real, dimension(:,:,:), intent(inout) :: u_cmpt !< The nominal zonal (u) component of the vector + !! pair which is having its halos points + !! exchanged. + real, dimension(:,:,:), intent(inout) :: v_cmpt !< The nominal meridional (v) component of the + !! vector pair which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: direction !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH, possibly plus SCALAR_PAIR if these are paired non-directional + !! scalars discretized at the typical vector component locations. For example, TO_EAST sends + !! the data to the processor to the east, so the halos on the western side are filled. TO_ALL + !! is the default if omitted. + integer, optional, intent(in) :: stagger !< An optional flag, which may be one of A_GRID, + !! BGRID_NE, or CGRID_NE, indicating where the two components of the vector are + !! discretized. Omitting stagger is the same as setting it to CGRID_NE. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + ! Local variables + integer :: stagger_local + integer :: dirflag + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + stagger_local = CGRID_NE ! Default value for type of grid + if (present(stagger)) stagger_local = stagger + + dirflag = To_All ! 60 + if (present(direction)) then ; if (direction > 0) dirflag = direction ; endif + + if (present(halo) .and. MOM_dom%thin_halo_updates) then + call mpp_complete_update_domains(id_update, u_cmpt, v_cmpt, & + MOM_dom%mpp_domain, flags=dirflag, gridtype=stagger_local, & + whalo=halo, ehalo=halo, shalo=halo, nhalo=halo) + else + call mpp_complete_update_domains(id_update, u_cmpt, v_cmpt, & + MOM_dom%mpp_domain, flags=dirflag, gridtype=stagger_local) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine pass_vector_complete_3d + +!> create_var_group_pass_2d sets up a group of two-dimensional array halo updates. +subroutine create_var_group_pass_2d(group, array, MOM_dom, sideflag, position, & + halo, clock) + type(group_pass_type), intent(inout) :: group !< The data type that store information for + !! group update. This data will be used in + !! do_group_pass. + real, dimension(:,:), intent(inout) :: array !< The array which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: sideflag !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH. For example, TO_EAST sends the data to the processor to the east, + !! so the halos on the western side are filled. TO_ALL is the default if sideflag is omitted. 
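+ ! An illustrative usage sketch (comments only, not compiled here); pass_T_S, T, S,
+ ! G%Domain and id_clock_pass are hypothetical names, and the generic interfaces
+ ! create_group_pass and do_group_pass are assumed to be declared earlier in this module.
+ !   call create_group_pass(pass_T_S, T, G%Domain)
+ !   call create_group_pass(pass_T_S, S, G%Domain)
+ !   call do_group_pass(pass_T_S, G%Domain, clock=id_clock_pass)
+ ! Aggregating several fields into one group reduces the number of separate exchanges.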
+ integer, optional, intent(in) :: position !< An optional argument indicating the position. + !! This is CENTER by default and is often CORNER, + !! but could also be EAST_FACE or NORTH_FACE. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + ! Local variables + integer :: dirflag + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + dirflag = To_All ! 60 + if (present(sideflag)) then ; if (sideflag > 0) dirflag = sideflag ; endif + + if (mpp_group_update_initialized(group)) then + call mpp_reset_group_update_field(group,array) + elseif (present(halo) .and. MOM_dom%thin_halo_updates) then + call mpp_create_group_update(group, array, MOM_dom%mpp_domain, flags=dirflag, & + position=position, whalo=halo, ehalo=halo, & + shalo=halo, nhalo=halo) + else + call mpp_create_group_update(group, array, MOM_dom%mpp_domain, flags=dirflag, & + position=position) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine create_var_group_pass_2d + +!> create_var_group_pass_3d sets up a group of three-dimensional array halo updates. +subroutine create_var_group_pass_3d(group, array, MOM_dom, sideflag, position, halo, & + clock) + type(group_pass_type), intent(inout) :: group !< The data type that store information for + !! group update. This data will be used in + !! do_group_pass. + real, dimension(:,:,:), intent(inout) :: array !< The array which is having its halos points + !! exchanged. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: sideflag !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH. For example, TO_EAST sends the data to the processor to the east, + !! so the halos on the western side are filled. TO_ALL is the default if sideflag is omitted. + integer, optional, intent(in) :: position !< An optional argument indicating the position. + !! This is CENTER by default and is often CORNER, + !! but could also be EAST_FACE or NORTH_FACE. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + ! Local variables + integer :: dirflag + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + dirflag = To_All ! 60 + if (present(sideflag)) then ; if (sideflag > 0) dirflag = sideflag ; endif + + if (mpp_group_update_initialized(group)) then + call mpp_reset_group_update_field(group,array) + elseif (present(halo) .and. MOM_dom%thin_halo_updates) then + call mpp_create_group_update(group, array, MOM_dom%mpp_domain, flags=dirflag, & + position=position, whalo=halo, ehalo=halo, & + shalo=halo, nhalo=halo) + else + call mpp_create_group_update(group, array, MOM_dom%mpp_domain, flags=dirflag, & + position=position) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine create_var_group_pass_3d + +!> create_vector_group_pass_2d sets up a group of two-dimensional vector halo updates. 
+subroutine create_vector_group_pass_2d(group, u_cmpt, v_cmpt, MOM_dom, direction, stagger, halo, & + clock) + type(group_pass_type), intent(inout) :: group !< The data type that store information for + !! group update. This data will be used in + !! do_group_pass. + real, dimension(:,:), intent(inout) :: u_cmpt !< The nominal zonal (u) component of the vector + !! pair which is having its halos points + !! exchanged. + real, dimension(:,:), intent(inout) :: v_cmpt !< The nominal meridional (v) component of the + !! vector pair which is having its halos points + !! exchanged. + + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent + integer, optional, intent(in) :: direction !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH, possibly plus SCALAR_PAIR if these are paired non-directional + !! scalars discretized at the typical vector component locations. For example, TO_EAST sends + !! the data to the processor to the east, so the halos on the western side are filled. TO_ALL + !! is the default if omitted. + integer, optional, intent(in) :: stagger !< An optional flag, which may be one of A_GRID, + !! BGRID_NE, or CGRID_NE, indicating where the two components of the vector are + !! discretized. Omitting stagger is the same as setting it to CGRID_NE. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + ! Local variables + integer :: stagger_local + integer :: dirflag + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + stagger_local = CGRID_NE ! Default value for type of grid + if (present(stagger)) stagger_local = stagger + + dirflag = To_All ! 60 + if (present(direction)) then ; if (direction > 0) dirflag = direction ; endif + + if (mpp_group_update_initialized(group)) then + call mpp_reset_group_update_field(group,u_cmpt, v_cmpt) + elseif (present(halo) .and. MOM_dom%thin_halo_updates) then + call mpp_create_group_update(group, u_cmpt, v_cmpt, MOM_dom%mpp_domain, & + flags=dirflag, gridtype=stagger_local, whalo=halo, ehalo=halo, & + shalo=halo, nhalo=halo) + else + call mpp_create_group_update(group, u_cmpt, v_cmpt, MOM_dom%mpp_domain, & + flags=dirflag, gridtype=stagger_local) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine create_vector_group_pass_2d + +!> create_vector_group_pass_3d sets up a group of three-dimensional vector halo updates. +subroutine create_vector_group_pass_3d(group, u_cmpt, v_cmpt, MOM_dom, direction, stagger, halo, & + clock) + type(group_pass_type), intent(inout) :: group !< The data type that store information for + !! group update. This data will be used in + !! do_group_pass. + real, dimension(:,:,:), intent(inout) :: u_cmpt !< The nominal zonal (u) component of the vector + !! pair which is having its halos points + !! exchanged. + real, dimension(:,:,:), intent(inout) :: v_cmpt !< The nominal meridional (v) component of the + !! vector pair which is having its halos points + !! exchanged. + + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. 
+ integer, optional, intent(in) :: direction !< An optional integer indicating which + !! directions the data should be sent. It is TO_ALL or the sum of any of TO_EAST, TO_WEST, + !! TO_NORTH, and TO_SOUTH, possibly plus SCALAR_PAIR if these are paired non-directional + !! scalars discretized at the typical vector component locations. For example, TO_EAST sends + !! the data to the processor to the east, so the halos on the western side are filled. TO_ALL + !! is the default if omitted. + integer, optional, intent(in) :: stagger !< An optional flag, which may be one of A_GRID, + !! BGRID_NE, or CGRID_NE, indicating where the two components of the vector are + !! discretized. Omitting stagger is the same as setting it to CGRID_NE. + integer, optional, intent(in) :: halo !< The size of the halo to update - the full + !! halo by default. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + + ! Local variables + integer :: stagger_local + integer :: dirflag + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + stagger_local = CGRID_NE ! Default value for type of grid + if (present(stagger)) stagger_local = stagger + + dirflag = To_All ! 60 + if (present(direction)) then ; if (direction > 0) dirflag = direction ; endif + + if (mpp_group_update_initialized(group)) then + call mpp_reset_group_update_field(group,u_cmpt, v_cmpt) + elseif (present(halo) .and. MOM_dom%thin_halo_updates) then + call mpp_create_group_update(group, u_cmpt, v_cmpt, MOM_dom%mpp_domain, & + flags=dirflag, gridtype=stagger_local, whalo=halo, ehalo=halo, & + shalo=halo, nhalo=halo) + else + call mpp_create_group_update(group, u_cmpt, v_cmpt, MOM_dom%mpp_domain, & + flags=dirflag, gridtype=stagger_local) + endif + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine create_vector_group_pass_3d + +!> do_group_pass carries out a group halo update. +subroutine do_group_pass(group, MOM_dom, clock) + type(group_pass_type), intent(inout) :: group !< The data type that store information for + !! group update. This data will be used in + !! do_group_pass. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + real :: d_type + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + call mpp_do_group_update(group, MOM_dom%mpp_domain, d_type) + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine do_group_pass + +!> start_group_pass starts out a group halo update. +subroutine start_group_pass(group, MOM_dom, clock) + type(group_pass_type), intent(inout) :: group !< The data type that store information for + !! group update. This data will be used in + !! do_group_pass. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. 
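+ ! An illustrative usage sketch (comments only, not compiled here), with the hypothetical
+ ! names pass_uv and G%Domain; the split form lets a grouped exchange overlap other work:
+ !   call start_group_pass(pass_uv, G%Domain, clock=id_clock_pass)
+ !   ! ... work that does not read the halos of the fields registered in pass_uv ...
+ !   call complete_group_pass(pass_uv, G%Domain, clock=id_clock_pass)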
+ + real :: d_type + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + call mpp_start_group_update(group, MOM_dom%mpp_domain, d_type) + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine start_group_pass + +!> complete_group_pass completes a group halo update. +subroutine complete_group_pass(group, MOM_dom, clock) + type(group_pass_type), intent(inout) :: group !< The data type that store information for + !! group update. This data will be used in + !! do_group_pass. + type(MOM_domain_type), intent(inout) :: MOM_dom !< The MOM_domain_type containing the mpp_domain + !! needed to determine where data should be + !! sent. + integer, optional, intent(in) :: clock !< The handle for a cpu time clock that should be + !! started then stopped to time this routine. + real :: d_type + + if (present(clock)) then ; if (clock>0) call cpu_clock_begin(clock) ; endif + + call mpp_complete_group_update(group, MOM_dom%mpp_domain, d_type) + + if (present(clock)) then ; if (clock>0) call cpu_clock_end(clock) ; endif + +end subroutine complete_group_pass + + +!> Pass a 2-D array from one MOM domain to another +subroutine redistribute_array_2d(Domain1, array1, Domain2, array2, complete) + type(domain2d), & + intent(in) :: Domain1 !< The MOM domain from which to extract information. + real, dimension(:,:), intent(in) :: array1 !< The array from which to extract information. + type(domain2d), & + intent(in) :: Domain2 !< The MOM domain receiving information. + real, dimension(:,:), intent(out) :: array2 !< The array receiving information. + logical, optional, intent(in) :: complete !< If true, finish communication before proceeding. + + ! Local variables + logical :: do_complete + + do_complete=.true.;if (PRESENT(complete)) do_complete = complete + + call mpp_redistribute(Domain1, array1, Domain2, array2, do_complete) + +end subroutine redistribute_array_2d + +!> Pass a 3-D array from one MOM domain to another +subroutine redistribute_array_3d(Domain1, array1, Domain2, array2, complete) + type(domain2d), & + intent(in) :: Domain1 !< The MOM domain from which to extract information. + real, dimension(:,:,:), intent(in) :: array1 !< The array from which to extract information. + type(domain2d), & + intent(in) :: Domain2 !< The MOM domain receiving information. + real, dimension(:,:,:), intent(out) :: array2 !< The array receiving information. + logical, optional, intent(in) :: complete !< If true, finish communication before proceeding. + + ! Local variables + logical :: do_complete + + do_complete=.true.;if (PRESENT(complete)) do_complete = complete + + call mpp_redistribute(Domain1, array1, Domain2, array2, do_complete) + +end subroutine redistribute_array_3d + + +!> Rescale the values of a 4-D array in its computational domain by a constant factor +subroutine rescale_comp_data_4d(domain, array, scale) + type(MOM_domain_type), intent(in) :: domain !< MOM domain from which to extract information + real, dimension(:,:,:,:), intent(inout) :: array !< The array which is having the data in its + !! computational domain rescaled + real, intent(in) :: scale !< A scaling factor by which to multiply the + !! 
values in the computational domain of array + integer :: is, ie, js, je + + if (scale == 1.0) return + + call get_simple_array_i_ind(domain, size(array,1), is, ie) + call get_simple_array_j_ind(domain, size(array,2), js, je) + array(is:ie,js:je,:,:) = scale*array(is:ie,js:je,:,:) + +end subroutine rescale_comp_data_4d + +!> Rescale the values of a 3-D array in its computational domain by a constant factor +subroutine rescale_comp_data_3d(domain, array, scale) + type(MOM_domain_type), intent(in) :: domain !< MOM domain from which to extract information + real, dimension(:,:,:), intent(inout) :: array !< The array which is having the data in its + !! computational domain rescaled + real, intent(in) :: scale !< A scaling factor by which to multiply the + !! values in the computational domain of array + integer :: is, ie, js, je + + if (scale == 1.0) return + + call get_simple_array_i_ind(domain, size(array,1), is, ie) + call get_simple_array_j_ind(domain, size(array,2), js, je) + array(is:ie,js:je,:) = scale*array(is:ie,js:je,:) + +end subroutine rescale_comp_data_3d + +!> Rescale the values of a 2-D array in its computational domain by a constant factor +subroutine rescale_comp_data_2d(domain, array, scale) + type(MOM_domain_type), intent(in) :: domain !< MOM domain from which to extract information + real, dimension(:,:), intent(inout) :: array !< The array which is having the data in its + !! computational domain rescaled + real, intent(in) :: scale !< A scaling factor by which to multiply the + !! values in the computational domain of array + integer :: is, ie, js, je + + if (scale == 1.0) return + + call get_simple_array_i_ind(domain, size(array,1), is, ie) + call get_simple_array_j_ind(domain, size(array,2), js, je) + array(is:ie,js:je) = scale*array(is:ie,js:je) + +end subroutine rescale_comp_data_2d + +!> create_MOM_domain creates and initializes a MOM_domain_type variables, based on the information +!! provided in arguments. +subroutine create_MOM_domain(MOM_dom, n_global, n_halo, reentrant, tripolar_N, layout, io_layout, & + domain_name, mask_table, symmetric, thin_halos, nonblocking) + type(MOM_domain_type), pointer :: MOM_dom !< A pointer to the MOM_domain_type being defined here. + integer, dimension(2), intent(in) :: n_global !< The number of points on the global grid in + !! the i- and j-directions + integer, dimension(2), intent(in) :: n_halo !< The number of halo points on each processor + logical, dimension(2), intent(in) :: reentrant !< If true the grid is periodic in the i- and j- directions + logical, intent(in) :: tripolar_N !< If true the grid uses northern tripolar connectivity + integer, dimension(2), intent(in) :: layout !< The layout of logical PEs in the i- and j-directions. + integer, dimension(2), optional, intent(in) :: io_layout !< The layout for parallel input and output. + character(len=*), optional, intent(in) :: domain_name !< A name for this domain, "MOM" if missing. + character(len=*), optional, intent(in) :: mask_table !< The full relative or absolute path to the mask table. + logical, optional, intent(in) :: symmetric !< If present, this specifies whether this domain + !! uses symmetric memory, or true if missing. + logical, optional, intent(in) :: thin_halos !< If present, this specifies whether to permit the use of + !! thin halo updates, or true if missing. + logical, optional, intent(in) :: nonblocking !< If present, this specifies whether to permit the use of + !! nonblocking halo updates, or false if missing. + + ! 
local variables
+ integer, dimension(4) :: global_indices ! The lower and upper global i- and j-index bounds
+ integer :: X_FLAGS ! A combination of integers encoding the x-direction grid connectivity.
+ integer :: Y_FLAGS ! A combination of integers encoding the y-direction grid connectivity.
+ integer :: xhalo_d2, yhalo_d2
+ character(len=200) :: mesg ! A string for use in error messages
+ logical :: mask_table_exists ! Mask_table is present and the file it points to exists
+
+ if (.not.associated(MOM_dom)) then
+ allocate(MOM_dom)
+ allocate(MOM_dom%mpp_domain)
+ allocate(MOM_dom%mpp_domain_d2)
+ endif
+
+ MOM_dom%name = "MOM" ; if (present(domain_name)) MOM_dom%name = trim(domain_name)
+
+ X_FLAGS = 0 ; Y_FLAGS = 0
+ if (reentrant(1)) X_FLAGS = CYCLIC_GLOBAL_DOMAIN
+ if (reentrant(2)) Y_FLAGS = CYCLIC_GLOBAL_DOMAIN
+ if (tripolar_N) then
+ Y_FLAGS = FOLD_NORTH_EDGE
+ if (reentrant(2)) call MOM_error(FATAL,"MOM_domains: "// &
+ "TRIPOLAR_N and REENTRANT_Y may not be used together.")
+ endif
+
+ ! Honor the documented defaults when the optional arguments are not provided.
+ MOM_dom%nonblocking_updates = .false.
+ if (present(nonblocking)) MOM_dom%nonblocking_updates = nonblocking
+ MOM_dom%thin_halo_updates = .true.
+ if (present(thin_halos)) MOM_dom%thin_halo_updates = thin_halos
+ MOM_dom%symmetric = .true. ; if (present(symmetric)) MOM_dom%symmetric = symmetric
+ MOM_dom%niglobal = n_global(1) ; MOM_dom%njglobal = n_global(2)
+ MOM_dom%nihalo = n_halo(1) ; MOM_dom%njhalo = n_halo(2)
+
+ ! Save the extra data for creating other domains of different resolution that overlay this domain.
+ MOM_dom%X_FLAGS = X_FLAGS
+ MOM_dom%Y_FLAGS = Y_FLAGS
+ MOM_dom%layout(:) = layout(:)
+
+ ! Set up the io_layout, with error handling.
+ MOM_dom%io_layout(:) = (/ 1, 1 /)
+ if (present(io_layout)) then
+ if (io_layout(1) == 0) then
+ MOM_dom%io_layout(1) = layout(1)
+ elseif (io_layout(1) > 1) then
+ MOM_dom%io_layout(1) = io_layout(1)
+ if (modulo(layout(1), io_layout(1)) /= 0) then
+ write(mesg,'("MOM_domains_init: The i-direction I/O-layout, IO_LAYOUT(1)=",i4, &
+ &", does not evenly divide the i-direction layout, NIPROC=,",i4,".")') io_layout(1), layout(1)
+ call MOM_error(FATAL, mesg)
+ endif
+ endif
+
+ if (io_layout(2) == 0) then
+ MOM_dom%io_layout(2) = layout(2)
+ elseif (io_layout(2) > 1) then
+ MOM_dom%io_layout(2) = io_layout(2)
+ if (modulo(layout(2), io_layout(2)) /= 0) then
+ write(mesg,'("MOM_domains_init: The j-direction I/O-layout, IO_LAYOUT(2)=",i4, &
+ &", does not evenly divide the j-direction layout, NJPROC=,",i4,".")') io_layout(2), layout(2)
+ call MOM_error(FATAL, mesg)
+ endif
+ endif
+ endif
+
+ if (present(mask_table)) then
+ mask_table_exists = file_exist(mask_table)
+ if (mask_table_exists) then
+ allocate(MOM_dom%maskmap(layout(1), layout(2)))
+ call parse_mask_table(mask_table, MOM_dom%maskmap, MOM_dom%name)
+ endif
+ else
+ mask_table_exists = .false.
+ endif
+
+ call clone_MD_to_d2D(MOM_dom, MOM_dom%mpp_domain)
+
+ !For downsampled domain, recommend a halo of 1 (or 0?) since we're not doing wide-stencil computations.
+ !But that does not work because the downsampled field would not have the correct size to pass the checks, e.g., we get
+ !error: downsample_diag_indices_get: peculiar size 28 in i-direction\ndoes not match one of 24 25 26 27
+ ! call clone_MD_to_d2D(MOM_dom, MOM_dom%mpp_domain_d2, halo_size=(MOM_dom%nihalo/2), coarsen=2)
+ call clone_MD_to_d2D(MOM_dom, MOM_dom%mpp_domain_d2, coarsen=2)
+
+end subroutine create_MOM_domain
+
+!> deallocate_MOM_domain deallocates memory associated with a pointer to a MOM_domain_type
+!!
and potentially all of its contents +subroutine deallocate_MOM_domain(MOM_domain, cursory) + type(MOM_domain_type), pointer :: MOM_domain !< A pointer to the MOM_domain_type being deallocated + logical, optional, intent(in) :: cursory !< If true do not deallocate fields associated + !! with the underlying infrastructure + logical :: invasive ! If true, deallocate fields associated with the underlying infrastructure + + invasive = .true. ; if (present(cursory)) invasive = .not.cursory + + if (associated(MOM_domain)) then + if (associated(MOM_domain%mpp_domain)) then + if (invasive) call mpp_deallocate_domain(MOM_domain%mpp_domain) + deallocate(MOM_domain%mpp_domain) + endif + if (associated(MOM_domain%mpp_domain_d2)) then + if (invasive) call mpp_deallocate_domain(MOM_domain%mpp_domain_d2) + deallocate(MOM_domain%mpp_domain_d2) + endif + if (associated(MOM_domain%maskmap)) deallocate(MOM_domain%maskmap) + deallocate(MOM_domain) + endif + +end subroutine deallocate_MOM_domain + +!> MOM_thread_affinity_set returns true if the number of openMP threads have been set to a value greater than 1. +function MOM_thread_affinity_set() + ! Local variables + !$ integer :: ocean_nthreads ! Number of openMP threads + !$ integer :: omp_get_num_threads ! An openMP function that returns the number of threads + logical :: MOM_thread_affinity_set + + MOM_thread_affinity_set = .false. + !$ call fms_affinity_init() + !$OMP PARALLEL + !$OMP MASTER + !$ ocean_nthreads = omp_get_num_threads() + !$OMP END MASTER + !$OMP END PARALLEL + !$ MOM_thread_affinity_set = (ocean_nthreads > 1 ) +end function MOM_thread_affinity_set + +!> set_MOM_thread_affinity sets the number of openMP threads to use with the ocean. +subroutine set_MOM_thread_affinity(ocean_nthreads, ocean_hyper_thread) + integer, intent(in) :: ocean_nthreads !< Number of openMP threads to use for the ocean model + logical, intent(in) :: ocean_hyper_thread !< If true, use hyper threading + + ! Local variables + !$ integer :: omp_get_thread_num, omp_get_num_threads !< These are the results of openMP functions + + !$ call fms_affinity_init() ! fms_affinity_init can be safely called more than once. 
+ !$ call fms_affinity_set('OCEAN', ocean_hyper_thread, ocean_nthreads) + !$ call omp_set_num_threads(ocean_nthreads) + !$OMP PARALLEL + !$ write(6,*) "MOM_domains_mod OMPthreading ", fms_affinity_get(), omp_get_thread_num(), omp_get_num_threads() + !$ flush(6) + !$OMP END PARALLEL +end subroutine set_MOM_thread_affinity + +!> This subroutine retrieves the 1-d domains that make up the 2d-domain in a MOM_domain +subroutine get_domain_components_MD(MOM_dom, x_domain, y_domain) + type(MOM_domain_type), intent(in) :: MOM_dom !< The MOM_domain whose contents are being extracted + type(domain1D), optional, intent(inout) :: x_domain !< The 1-d logical x-domain + type(domain1D), optional, intent(inout) :: y_domain !< The 1-d logical y-domain + + call mpp_get_domain_components(MOM_dom%mpp_domain, x_domain, y_domain) +end subroutine get_domain_components_MD + +!> This subroutine retrieves the 1-d domains that make up a 2d-domain +subroutine get_domain_components_d2D(domain, x_domain, y_domain) + type(domain2D), intent(in) :: domain !< The 2D domain whose contents are being extracted + type(domain1D), optional, intent(inout) :: x_domain !< The 1-d logical x-domain + type(domain1D), optional, intent(inout) :: y_domain !< The 1-d logical y-domain + + call mpp_get_domain_components(domain, x_domain, y_domain) +end subroutine get_domain_components_d2D + +!> clone_MD_to_MD copies one MOM_domain_type into another, while allowing +!! some properties of the new type to differ from the original one. +subroutine clone_MD_to_MD(MD_in, MOM_dom, min_halo, halo_size, symmetric, domain_name, & + turns, refine, extra_halo) + type(MOM_domain_type), intent(in) :: MD_in !< An existing MOM_domain + type(MOM_domain_type), pointer :: MOM_dom !< A pointer to a MOM_domain that will be + !! allocated if it is unassociated, and will have data + !! copied from MD_in + integer, dimension(2), & + optional, intent(inout) :: min_halo !< If present, this sets the + !! minimum halo size for this domain in the i- and j- + !! directions, and returns the actual halo size used. + integer, optional, intent(in) :: halo_size !< If present, this sets the halo + !! size for the domain in the i- and j-directions. + !! min_halo and halo_size can not both be present. + logical, optional, intent(in) :: symmetric !< If present, this specifies + !! whether the new domain is symmetric, regardless of + !! whether the macro SYMMETRIC_MEMORY_ is defined. + character(len=*), & + optional, intent(in) :: domain_name !< A name for the new domain, copied + !! from MD_in if missing. + integer, optional, intent(in) :: turns !< Number of quarter turns + integer, optional, intent(in) :: refine !< A factor by which to enhance the grid resolution. + integer, optional, intent(in) :: extra_halo !< An extra number of points in the halos + !! compared with MD_in + + integer :: global_indices(4) + logical :: mask_table_exists + integer, dimension(:), allocatable :: exni ! The extents of the grid for each i-row of the layout. + ! The sum of exni must equal MOM_dom%niglobal. + integer, dimension(:), allocatable :: exnj ! The extents of the grid for each j-row of the layout. + ! The sum of exni must equal MOM_dom%niglobal. + integer :: qturns ! The number of quarter turns, restricted to the range of 0 to 3. + integer :: i, j, nl1, nl2 + + qturns = 0 + if (present(turns)) qturns = modulo(turns, 4) + + if (.not.associated(MOM_dom)) then + allocate(MOM_dom) + allocate(MOM_dom%mpp_domain) + allocate(MOM_dom%mpp_domain_d2) + endif + +! 
Save the extra data for creating other domains of different resolution that overlay this domain + MOM_dom%symmetric = MD_in%symmetric + MOM_dom%nonblocking_updates = MD_in%nonblocking_updates + MOM_dom%thin_halo_updates = MD_in%thin_halo_updates + + if (modulo(qturns, 2) /= 0) then + MOM_dom%niglobal = MD_in%njglobal ; MOM_dom%njglobal = MD_in%niglobal + MOM_dom%nihalo = MD_in%njhalo ; MOM_dom%njhalo = MD_in%nihalo + call get_layout_extents(MD_in, exnj, exni) + + MOM_dom%X_FLAGS = MD_in%Y_FLAGS ; MOM_dom%Y_FLAGS = MD_in%X_FLAGS + MOM_dom%layout(:) = MD_in%layout(2:1:-1) + MOM_dom%io_layout(:) = MD_in%io_layout(2:1:-1) + else + MOM_dom%niglobal = MD_in%niglobal ; MOM_dom%njglobal = MD_in%njglobal + MOM_dom%nihalo = MD_in%nihalo ; MOM_dom%njhalo = MD_in%njhalo + call get_layout_extents(MD_in, exni, exnj) + + MOM_dom%X_FLAGS = MD_in%X_FLAGS ; MOM_dom%Y_FLAGS = MD_in%Y_FLAGS + MOM_dom%layout(:) = MD_in%layout(:) + MOM_dom%io_layout(:) = MD_in%io_layout(:) + endif + + ! Ensure that the points per processor are the same on the source and densitation grids. + select case (qturns) + case (1) ; call invert(exni) + case (2) ; call invert(exni) ; call invert(exnj) + case (3) ; call invert(exnj) + end select + + if (associated(MD_in%maskmap)) then + mask_table_exists = .true. + allocate(MOM_dom%maskmap(MOM_dom%layout(1), MOM_dom%layout(2))) + + nl1 = MOM_dom%layout(1) ; nl2 = MOM_dom%layout(2) + select case (qturns) + case (0) + do j=1,nl2 ; do i=1,nl1 + MOM_dom%maskmap(i,j) = MD_in%maskmap(i, j) + enddo ; enddo + case (1) + do j=1,nl2 ; do i=1,nl1 + MOM_dom%maskmap(i,j) = MD_in%maskmap(j, nl1+1-i) + enddo ; enddo + case (2) + do j=1,nl2 ; do i=1,nl1 + MOM_dom%maskmap(i,j) = MD_in%maskmap(nl1+1-i, nl2+1-j) + enddo ; enddo + case (3) + do j=1,nl2 ; do i=1,nl1 + MOM_dom%maskmap(i,j) = MD_in%maskmap(nl2+1-j, i) + enddo ; enddo + end select + else + mask_table_exists = .false. + endif + + ! Optionally enhance the grid resolution. + if (present(refine)) then ; if (refine > 1) then + MOM_dom%niglobal = refine*MOM_dom%niglobal ; MOM_dom%njglobal = refine*MOM_dom%njglobal + MOM_dom%nihalo = refine*MOM_dom%nihalo ; MOM_dom%njhalo = refine*MOM_dom%njhalo + do i=1,MOM_dom%layout(1) ; exni(i) = refine*exni(i) ; enddo + do j=1,MOM_dom%layout(2) ; exnj(j) = refine*exnj(j) ; enddo + endif ; endif + + ! Optionally enhance the grid resolution. + if (present(extra_halo)) then ; if (extra_halo > 0) then + MOM_dom%nihalo = MOM_dom%nihalo + extra_halo ; MOM_dom%njhalo = MOM_dom%njhalo + extra_halo + endif ; endif + + if (present(halo_size) .and. present(min_halo)) call MOM_error(FATAL, & + "clone_MOM_domain can not have both halo_size and min_halo present.") + + if (present(min_halo)) then + MOM_dom%nihalo = max(MOM_dom%nihalo, min_halo(1)) + min_halo(1) = MOM_dom%nihalo + MOM_dom%njhalo = max(MOM_dom%njhalo, min_halo(2)) + min_halo(2) = MOM_dom%njhalo + endif + + if (present(halo_size)) then + MOM_dom%nihalo = halo_size ; MOM_dom%njhalo = halo_size + endif + + if (present(symmetric)) then ; MOM_dom%symmetric = symmetric ; endif + + if (present(domain_name)) then + MOM_dom%name = trim(domain_name) + else + MOM_dom%name = MD_in%name + endif + + call clone_MD_to_d2D(MOM_dom, MOM_dom%mpp_domain, xextent=exni, yextent=exnj) + + call clone_MD_to_d2D(MOM_dom, MOM_dom%mpp_domain_d2, domain_name=MOM_dom%name, coarsen=2) + +end subroutine clone_MD_to_MD + + +!> clone_MD_to_d2D uses information from a MOM_domain_type to create a new +!! domain2d type, while allowing some properties of the new type to differ from +!! 
the original one. +subroutine clone_MD_to_d2D(MD_in, mpp_domain, min_halo, halo_size, symmetric, & + domain_name, turns, xextent, yextent, coarsen) + type(MOM_domain_type), intent(in) :: MD_in !< An existing MOM_domain to be cloned + type(domain2d), intent(inout) :: mpp_domain !< The new mpp_domain to be set up + integer, dimension(2), & + optional, intent(inout) :: min_halo !< If present, this sets the + !! minimum halo size for this domain in the i- and j- + !! directions, and returns the actual halo size used. + integer, optional, intent(in) :: halo_size !< If present, this sets the halo + !! size for the domain in the i- and j-directions. + !! min_halo and halo_size can not both be present. + logical, optional, intent(in) :: symmetric !< If present, this specifies + !! whether the new domain is symmetric, regardless of + !! whether the macro SYMMETRIC_MEMORY_ is defined or + !! whether MD_in is symmetric. + character(len=*), & + optional, intent(in) :: domain_name !< A name for the new domain, "MOM" + !! if missing. + integer, optional, intent(in) :: turns !< Number of quarter turns - not implemented here. + integer, optional, intent(in) :: coarsen !< A factor by which to coarsen this grid. + !! The default of 1 is for no coarsening. + integer, dimension(:), optional, intent(in) :: xextent !< The number of grid points in the + !! tracer computational domain for division of the x-layout. + integer, dimension(:), optional, intent(in) :: yextent !< The number of grid points in the + !! tracer computational domain for division of the y-layout. + + integer :: global_indices(4) + integer :: nihalo, njhalo + logical :: symmetric_dom, do_coarsen + character(len=64) :: dom_name + + if (present(turns)) & + call MOM_error(FATAL, "Rotation not supported for MOM_domain to domain2d") + + if (present(halo_size) .and. present(min_halo)) call MOM_error(FATAL, & + "clone_MOM_domain can not have both halo_size and min_halo present.") + + do_coarsen = .false. ; if (present(coarsen)) then ; do_coarsen = (coarsen > 1) ; endif + + nihalo = MD_in%nihalo ; njhalo = MD_in%njhalo + if (do_coarsen) then + nihalo = int(MD_in%nihalo / coarsen) ; njhalo = int(MD_in%njhalo / coarsen) + endif + + if (present(min_halo)) then + nihalo = max(nihalo, min_halo(1)) + njhalo = max(njhalo, min_halo(2)) + min_halo(1) = nihalo ; min_halo(2) = njhalo + endif + if (present(halo_size)) then + nihalo = halo_size ; njhalo = halo_size + endif + + symmetric_dom = MD_in%symmetric + if (present(symmetric)) then ; symmetric_dom = symmetric ; endif + + dom_name = MD_in%name + if (do_coarsen) dom_name = trim(MD_in%name)//"c" + if (present(domain_name)) dom_name = trim(domain_name) + + global_indices(1:4) = (/ 1, MD_in%niglobal, 1, MD_in%njglobal /) + if (do_coarsen) then + global_indices(1:4) = (/ 1, (MD_in%niglobal/coarsen), 1, (MD_in%njglobal/coarsen) /) + endif + + if (associated(MD_in%maskmap)) then + call mpp_define_domains( global_indices, MD_in%layout, mpp_domain, & + xflags=MD_in%X_FLAGS, yflags=MD_in%Y_FLAGS, xhalo=nihalo, yhalo=njhalo, & + xextent=xextent, yextent=yextent, symmetry=symmetric_dom, name=dom_name, & + maskmap=MD_in%maskmap ) + else + call mpp_define_domains( global_indices, MD_in%layout, mpp_domain, & + xflags=MD_in%X_FLAGS, yflags=MD_in%Y_FLAGS, xhalo=nihalo, yhalo=njhalo, & + symmetry=symmetric_dom, xextent=xextent, yextent=yextent, name=dom_name) + endif + + if ((MD_in%io_layout(1) + MD_in%io_layout(2) > 0) .and. 
& + (MD_in%layout(1)*MD_in%layout(2) > 1)) then + call mpp_define_io_domain(mpp_domain, MD_in%io_layout) + else + call mpp_define_io_domain(mpp_domain, (/ 1, 1 /) ) + endif + +end subroutine clone_MD_to_d2D + +!> Returns the index ranges that have been stored in a MOM_domain_type +subroutine get_domain_extent_MD(Domain, isc, iec, jsc, jec, isd, ied, jsd, jed, & + isg, ieg, jsg, jeg, idg_offset, jdg_offset, & + symmetric, local_indexing, index_offset, coarsen) + type(MOM_domain_type), & + intent(in) :: Domain !< The MOM domain from which to extract information + integer, intent(out) :: isc !< The start i-index of the computational domain + integer, intent(out) :: iec !< The end i-index of the computational domain + integer, intent(out) :: jsc !< The start j-index of the computational domain + integer, intent(out) :: jec !< The end j-index of the computational domain + integer, intent(out) :: isd !< The start i-index of the data domain + integer, intent(out) :: ied !< The end i-index of the data domain + integer, intent(out) :: jsd !< The start j-index of the data domain + integer, intent(out) :: jed !< The end j-index of the data domain + integer, optional, intent(out) :: isg !< The start i-index of the global domain + integer, optional, intent(out) :: ieg !< The end i-index of the global domain + integer, optional, intent(out) :: jsg !< The start j-index of the global domain + integer, optional, intent(out) :: jeg !< The end j-index of the global domain + integer, optional, intent(out) :: idg_offset !< The offset between the corresponding global and + !! data i-index spaces. + integer, optional, intent(out) :: jdg_offset !< The offset between the corresponding global and + !! data j-index spaces. + logical, optional, intent(out) :: symmetric !< True if symmetric memory is used. + logical, optional, intent(in) :: local_indexing !< If true, local tracer array indices start at 1, + !! as in most MOM6 code. The default is true. + integer, optional, intent(in) :: index_offset !< A fixed additional offset to all indices. This + !! can be useful for some types of debugging with + !! dynamic memory allocation. The default is 0. + integer, optional, intent(in) :: coarsen !< A factor by which the grid is coarsened. + !! The default is 1, for no coarsening. + + ! Local variables + integer :: isg_, ieg_, jsg_, jeg_ + integer :: ind_off, idg_off, jdg_off, coarsen_lev + logical :: local + + local = .true. ; if (present(local_indexing)) local = local_indexing + ind_off = 0 ; if (present(index_offset)) ind_off = index_offset + + coarsen_lev = 1 ; if (present(coarsen)) coarsen_lev = coarsen + + if (coarsen_lev == 1) then + call mpp_get_compute_domain(Domain%mpp_domain, isc, iec, jsc, jec) + call mpp_get_data_domain(Domain%mpp_domain, isd, ied, jsd, jed) + call mpp_get_global_domain(Domain%mpp_domain, isg_, ieg_, jsg_, jeg_) + elseif (coarsen_lev == 2) then + if (.not.associated(Domain%mpp_domain_d2)) call MOM_error(FATAL, & + "get_domain_extent called with coarsen=2, but Domain%mpp_domain_d2 is not associated.") + call mpp_get_compute_domain(Domain%mpp_domain_d2, isc, iec, jsc, jec) + call mpp_get_data_domain(Domain%mpp_domain_d2, isd, ied, jsd, jed) + call mpp_get_global_domain(Domain%mpp_domain_d2, isg_, ieg_, jsg_, jeg_) + else + call MOM_error(FATAL, "get_domain_extent called with an unsupported level of coarsening.") + endif + + if (local) then + ! This code institutes the MOM convention that local array indices start at 1. 
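+ ! For instance, with isd=3, isc=5, iec=14, ied=16 on entry (a hypothetical decomposition), the + ! assignments below return isd=1, isc=3, iec=12, ied=14 and idg_offset=2, so that a global + ! i-index can be recovered as i_global = i_local + idg_offset.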
+ idg_off = isd - 1 ; jdg_off = jsd - 1 + isc = isc - isd + 1 ; iec = iec - isd + 1 ; jsc = jsc - jsd + 1 ; jec = jec - jsd + 1 + ied = ied - isd + 1 ; jed = jed - jsd + 1 + isd = 1 ; jsd = 1 + else + idg_off = 0 ; jdg_off = 0 + endif + if (ind_off /= 0) then + idg_off = idg_off + ind_off ; jdg_off = jdg_off + ind_off + isc = isc + ind_off ; iec = iec + ind_off + jsc = jsc + ind_off ; jec = jec + ind_off + isd = isd + ind_off ; ied = ied + ind_off + jsd = jsd + ind_off ; jed = jed + ind_off + endif + if (present(isg)) isg = isg_ + if (present(ieg)) ieg = ieg_ + if (present(jsg)) jsg = jsg_ + if (present(jeg)) jeg = jeg_ + if (present(idg_offset)) idg_offset = idg_off + if (present(jdg_offset)) jdg_offset = jdg_off + if (present(symmetric)) symmetric = Domain%symmetric + +end subroutine get_domain_extent_MD + +!> Returns the index ranges that have been stored in a domain2D type +subroutine get_domain_extent_d2D(Domain, isc, iec, jsc, jec, isd, ied, jsd, jed) + type(domain2d), intent(in) :: Domain !< The MOM domain from which to extract information + integer, intent(out) :: isc !< The start i-index of the computational domain + integer, intent(out) :: iec !< The end i-index of the computational domain + integer, intent(out) :: jsc !< The start j-index of the computational domain + integer, intent(out) :: jec !< The end j-index of the computational domain + integer, optional, intent(out) :: isd !< The start i-index of the data domain + integer, optional, intent(out) :: ied !< The end i-index of the data domain + integer, optional, intent(out) :: jsd !< The start j-index of the data domain + integer, optional, intent(out) :: jed !< The end j-index of the data domain + + ! Local variables + integer :: isd_, ied_, jsd_, jed_, jsg_, jeg_, isg_, ieg_ + + call mpp_get_compute_domain(Domain, isc, iec, jsc, jec) + call mpp_get_data_domain(Domain, isd_, ied_, jsd_, jed_) + + if (present(isd)) isd = isd_ + if (present(ied)) ied = ied_ + if (present(jsd)) jsd = jsd_ + if (present(jed)) jed = jed_ + +end subroutine get_domain_extent_d2D + +!> Return the (potentially symmetric) computational domain i-bounds for an array +!! passed without index specifications (i.e. indices start at 1) based on an array size. +subroutine get_simple_array_i_ind(domain, size, is, ie, symmetric) + type(MOM_domain_type), intent(in) :: domain !< MOM domain from which to extract information + integer, intent(in) :: size !< The i-array size + integer, intent(out) :: is !< The computational domain starting i-index. + integer, intent(out) :: ie !< The computational domain ending i-index. + logical, optional, intent(in) :: symmetric !< If present, indicates whether symmetric sizes + !! can be considered. + ! Local variables + logical :: sym + character(len=120) :: mesg, mesg2 + integer :: isc, iec, jsc, jec, isd, ied, jsd, jed + + call mpp_get_compute_domain(Domain%mpp_domain, isc, iec, jsc, jec) + call mpp_get_data_domain(Domain%mpp_domain, isd, ied, jsd, jed) + + isc = isc-isd+1 ; iec = iec-isd+1 ; ied = ied-isd+1 ; isd = 1 + sym = Domain%symmetric ; if (present(symmetric)) sym = symmetric + + if (size == ied) then ; is = isc ; ie = iec + elseif (size == 1+iec-isc) then ; is = 1 ; ie = size + elseif (sym .and. (size == 1+ied)) then ; is = isc ; ie = iec+1 + elseif (sym .and. (size == 2+iec-isc)) then ; is = 1 ; ie = size+1 + else + write(mesg,'("Unrecognized size ", i6, "in call to get_simple_array_i_ind. 
\")') size + if (sym) then + write(mesg2,'("Valid sizes are : ", 2i7)') ied, 1+iec-isc + else + write(mesg2,'("Valid sizes are : ", 4i7)') ied, 1+iec-isc, 1+ied, 2+iec-isc + endif + call MOM_error(FATAL, trim(mesg)//trim(mesg2)) + endif + +end subroutine get_simple_array_i_ind + + +!> Return the (potentially symmetric) computational domain j-bounds for an array +!! passed without index specifications (i.e. indices start at 1) based on an array size. +subroutine get_simple_array_j_ind(domain, size, js, je, symmetric) + type(MOM_domain_type), intent(in) :: domain !< MOM domain from which to extract information + integer, intent(in) :: size !< The j-array size + integer, intent(out) :: js !< The computational domain starting j-index. + integer, intent(out) :: je !< The computational domain ending j-index. + logical, optional, intent(in) :: symmetric !< If present, indicates whether symmetric sizes + !! can be considered. + ! Local variables + logical :: sym + character(len=120) :: mesg, mesg2 + integer :: isc, iec, jsc, jec, isd, ied, jsd, jed + + call mpp_get_compute_domain(Domain%mpp_domain, isc, iec, jsc, jec) + call mpp_get_data_domain(Domain%mpp_domain, isd, ied, jsd, jed) + + jsc = jsc-jsd+1 ; jec = jec-jsd+1 ; jed = jed-jsd+1 ; jsd = 1 + sym = Domain%symmetric ; if (present(symmetric)) sym = symmetric + + if (size == jed) then ; js = jsc ; je = jec + elseif (size == 1+jec-jsc) then ; js = 1 ; je = size + elseif (sym .and. (size == 1+jed)) then ; js = jsc ; je = jec+1 + elseif (sym .and. (size == 2+jec-jsc)) then ; js = 1 ; je = size+1 + else + write(mesg,'("Unrecognized size ", i6, "in call to get_simple_array_j_ind. \")') size + if (sym) then + write(mesg2,'("Valid sizes are : ", 2i7)') jed, 1+jec-jsc + else + write(mesg2,'("Valid sizes are : ", 4i7)') jed, 1+jec-jsc, 1+jed, 2+jec-jsc + endif + call MOM_error(FATAL, trim(mesg)//trim(mesg2)) + endif + +end subroutine get_simple_array_j_ind + +!> Invert the contents of a 1-d array +subroutine invert(array) + integer, dimension(:), intent(inout) :: array !< The 1-d array to invert + integer :: i, ni, swap + ni = size(array) + do i=1,ni + swap = array(i) + array(i) = array(ni+1-i) + array(ni+1-i) = swap + enddo +end subroutine invert + +!> Returns the global shape of h-point arrays +subroutine get_global_shape(domain, niglobal, njglobal) + type(MOM_domain_type), intent(in) :: domain !< MOM domain from which to extract information + integer, intent(out) :: niglobal !< i-index global size of h-point arrays + integer, intent(out) :: njglobal !< j-index global size of h-point arrays + + niglobal = domain%niglobal + njglobal = domain%njglobal +end subroutine get_global_shape + +!> Get the array ranges in one dimension for the divisions of a global index space +subroutine compute_block_extent(isg, ieg, ndivs, ibegin, iend) + integer, intent(in) :: isg !< The starting index of the global index space + integer, intent(in) :: ieg !< The ending index of the global index space + integer, intent(in) :: ndivs !< The number of divisions + integer, dimension(:), intent(out) :: ibegin !< The starting index of each division + integer, dimension(:), intent(out) :: iend !< The ending index of each division + + call mpp_compute_block_extent(isg, ieg, ndivs, ibegin, iend) +end subroutine compute_block_extent + +!> Broadcast a 2-d domain from the root PE to the other PEs +subroutine broadcast_domain(domain) + type(domain2d), intent(inout) :: domain !< The domain2d type that will be shared across PEs. 
+ + call mpp_broadcast_domain(domain) +end subroutine broadcast_domain + +!> Gather the data from a decomposed 2-d array into a complete global array on each PE. +subroutine global_field(domain, local, global) + type(domain2d), intent(inout) :: domain !< The domain2d type that describes the decomposition + real, dimension(:,:), intent(in) :: local !< The portion of the array on the local PE + real, dimension(:,:), intent(out) :: global !< The whole global array + + call mpp_global_field(domain, local, global) +end subroutine global_field + +!> Returns arrays of the i- and j- sizes of the h-point computational domains for each +!! element of the grid layout. Any input values in the extent arrays are discarded, so +!! they are effectively intent out despite their declared intent of inout. +subroutine get_layout_extents(Domain, extent_i, extent_j) + type(MOM_domain_type), intent(in) :: domain !< MOM domain from which to extract information + integer, dimension(:), allocatable, intent(inout) :: extent_i !< The number of points in the + !! i-direction in each i-row of the layout + integer, dimension(:), allocatable, intent(inout) :: extent_j !< The number of points in the + !! j-direction in each j-row of the layout + + if (allocated(extent_i)) deallocate(extent_i) + if (allocated(extent_j)) deallocate(extent_j) + allocate(extent_i(domain%layout(1))) ; extent_i(:) = 0 + allocate(extent_j(domain%layout(2))) ; extent_j(:) = 0 + call mpp_get_domain_extents(domain%mpp_domain, extent_i, extent_j) +end subroutine get_layout_extents + +end module MOM_domain_infra diff --git a/config_src/infra/FMS1/MOM_ensemble_manager_infra.F90 b/config_src/infra/FMS1/MOM_ensemble_manager_infra.F90 new file mode 100644 index 0000000000..66bbb86e2f --- /dev/null +++ b/config_src/infra/FMS1/MOM_ensemble_manager_infra.F90 @@ -0,0 +1,95 @@ +!> A simple (very thin) wrapper for managing ensemble member layout information +module MOM_ensemble_manager_infra + +! This file is part of MOM6. See LICENSE.md for the license. + +use ensemble_manager_mod, only : FMS_ensemble_manager_init => ensemble_manager_init +use ensemble_manager_mod, only : FMS_ensemble_pelist_setup => ensemble_pelist_setup +use ensemble_manager_mod, only : FMS_get_ensemble_id => get_ensemble_id +use ensemble_manager_mod, only : FMS_get_ensemble_size => get_ensemble_size +use ensemble_manager_mod, only : FMS_get_ensemble_pelist => get_ensemble_pelist +use ensemble_manager_mod, only : FMS_get_ensemble_filter_pelist => get_ensemble_filter_pelist + +implicit none ; private + +public :: ensemble_manager_init, ensemble_pelist_setup +public :: get_ensemble_id, get_ensemble_size +public :: get_ensemble_pelist, get_ensemble_filter_pelist + +contains + +!> Initializes the ensemble manager which divides available resources +!! in order to concurrently execute an ensemble of model realizations. +subroutine ensemble_manager_init() + + call FMS_ensemble_manager_init() + +end subroutine ensemble_manager_init + +!> Create a list of processing elements (PEs) across components +!! associated with the current ensemble member. +subroutine ensemble_pelist_setup(concurrent, atmos_npes, ocean_npes, land_npes, ice_npes, & + Atm_pelist, Ocean_pelist, Land_pelist, Ice_pelist) + logical, intent(in) :: concurrent !< A logical flag; if True, the fast (atmosphere) + !! PEs are run concurrently with the + !! slow (ocean) PEs within the coupler. 
+ integer, intent(in) :: atmos_npes !< The number of atmospheric (fast) PEs + integer, intent(in) :: ocean_npes !< The number of ocean (slow) PEs + integer, intent(in) :: land_npes !< The number of land (fast) PEs + integer, intent(in) :: ice_npes !< The number of ice (fast) PEs + integer, dimension(:), intent(inout) :: Atm_pelist !< A list of Atm PEs + integer, dimension(:), intent(inout) :: Ocean_pelist !< A list of Ocean PEs + integer, dimension(:), intent(inout) :: Land_pelist !< A list of Land PEs + integer, dimension(:), intent(inout) :: Ice_pelist !< A list of Ice PEs + + + call FMS_ensemble_pelist_setup(concurrent, atmos_npes, ocean_npes, land_npes, ice_npes, & + Atm_pelist, Ocean_pelist, Land_pelist, Ice_pelist) + +end subroutine ensemble_pelist_setup + +!> Returns the numeric id for the current ensemble member +function get_ensemble_id() + integer :: get_ensemble_id + + get_ensemble_id = FMS_get_ensemble_id() + +end function get_ensemble_id + +!> Returns ensemble information as follows, +!! index (1) :: ensemble size +!! index (2) :: Number of PEs per ensemble member +!! index (3) :: Number of ocean PEs per ensemble member +!! index (4) :: Number of atmos PEs per ensemble member +!! index (5) :: Number of land PEs per ensemble member +!! index (6) :: Number of ice PEs per ensemble member +function get_ensemble_size() + integer, dimension(6) :: get_ensemble_size + + get_ensemble_size = FMS_get_ensemble_size() + +end function get_ensemble_size + +!> Returns the list of PEs associated with all ensemble members. +!! Results are stored in the argument array which must be large +!! enough to contain the list. If the optional name argument is present, +!! the returned processor list is for a particular component (atmos, ocean, land, ice) +subroutine get_ensemble_pelist(pelist, name) + integer, intent(inout) :: pelist(:,:) !< A processor list for all ensemble members + character(len=*), optional, intent(in) :: name !< An optional component name (atmos, ocean, land, ice) + + call FMS_get_ensemble_pelist(pelist, name) + +end subroutine get_ensemble_pelist + +!> Returns the list of PEs associated with the named ensemble filter application. +!! Valid component names include ('atmos', 'ocean', 'land', and 'ice') +subroutine get_ensemble_filter_pelist(pelist, name) + integer, intent(inout) :: pelist(:) !< A processor list for the ensemble filter + character(len=*), intent(in) :: name !< The component name (atmos, ocean, land, ice) + + call FMS_get_Ensemble_filter_pelist(pelist, name) + +end subroutine get_ensemble_filter_pelist + +end module MOM_ensemble_manager_infra diff --git a/config_src/infra/FMS1/MOM_error_infra.F90 b/config_src/infra/FMS1/MOM_error_infra.F90 new file mode 100644 index 0000000000..e5a8b8dc68 --- /dev/null +++ b/config_src/infra/FMS1/MOM_error_infra.F90 @@ -0,0 +1,42 @@ +!> Routines for error handling and I/O management +module MOM_error_infra + +! This file is part of MOM6. See LICENSE.md for the license. + +use mpp_mod, only : mpp_error, mpp_pe, mpp_root_pe, mpp_stdlog=>stdlog, mpp_stdout=>stdout +use mpp_mod, only : NOTE, WARNING, FATAL + +implicit none ; private + +public :: MOM_err, is_root_pe, stdlog, stdout +!> Integer parameters encoding the severity of an error message +public :: NOTE, WARNING, FATAL + +contains + +!> MOM_err writes an error message, and may cause the run to stop depending on the +!! severity of the error. 
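+!! For example, a caller could issue a non-fatal warning with +!!   call MOM_err(WARNING, "MOM_example: the time step exceeds the advised stability limit.") +!! or end the run by passing FATAL instead; the module name and message text here are only illustrative.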
+subroutine MOM_err(severity, message) + integer, intent(in) :: severity !< The severity level of this error + character(len=*), intent(in) :: message !< A message to write out + + call mpp_error(severity, message) +end subroutine MOM_err + +!> stdout returns the standard Fortran unit number for output +integer function stdout() + stdout = mpp_stdout() +end function stdout + +!> stdlog returns the standard Fortran unit number to use to log messages +integer function stdlog() + stdlog = mpp_stdlog() +end function stdlog + +!> is_root_pe returns .true. if the current PE is the root PE. +logical function is_root_pe() + is_root_pe = .false. + if (mpp_pe() == mpp_root_pe()) is_root_pe = .true. +end function is_root_pe + +end module MOM_error_infra diff --git a/config_src/infra/FMS1/MOM_interp_infra.F90 b/config_src/infra/FMS1/MOM_interp_infra.F90 new file mode 100644 index 0000000000..ca5b2b8516 --- /dev/null +++ b/config_src/infra/FMS1/MOM_interp_infra.F90 @@ -0,0 +1,251 @@ +!> This module wraps the FMS temporal and spatial interpolation routines +module MOM_interp_infra + +! This file is part of MOM6. See LICENSE.md for the license. + +use MOM_domain_infra, only : MOM_domain_type, domain2d +use MOM_io_infra, only : axistype +use MOM_time_manager, only : time_type +use horiz_interp_mod, only : horiz_interp_new, horiz_interp, horiz_interp_init, horiz_interp_type +use time_interp_external_mod, only : time_interp_external +use time_interp_external_mod, only : init_external_field, time_interp_external_init +use time_interp_external_mod, only : get_external_field_size +use time_interp_external_mod, only : get_external_field_axes, get_external_field_missing + +implicit none ; private + +public :: horiz_interp_type, horiz_interp_init +public :: time_interp_extern, init_extern_field, time_interp_external_init +public :: get_external_field_info +public :: run_horiz_interp, build_horiz_interp_weights + +!> Read a field based on model time, and rotate to the model domain. +interface time_interp_extern + module procedure time_interp_extern_0d + module procedure time_interp_extern_2d + module procedure time_interp_extern_3d +end interface time_interp_extern + +!> perform horizontal interpolation of field +interface run_horiz_interp + module procedure horiz_interp_from_weights_field2d + module procedure horiz_interp_from_weights_field3d +end interface + +!> build weights for horizontal interpolation of field +interface build_horiz_interp_weights + module procedure build_horiz_interp_weights_2d_to_2d +end interface build_horiz_interp_weights + +contains + +!> perform horizontal interpolation of a 2d field using pre-computed weights +!! source and destination coordinates are 2d +subroutine horiz_interp_from_weights_field2d(Interp, data_in, data_out, verbose, mask_in, mask_out, & + missing_value, missing_permit, err_msg) + + type(horiz_interp_type), intent(in) :: Interp !< type containing interpolation options and weights + real, dimension(:,:), intent(in) :: data_in !< input data + real, dimension(:,:), intent(out) :: data_out !< output data + integer, optional, intent(in) :: verbose !< verbosity level + real, dimension(:,:), optional, intent(in) :: mask_in !< mask for input data + real, dimension(:,:), optional, intent(out) :: mask_out !< mask for output data + real, optional, intent(in) :: missing_value !< A value indicating missing data + integer, optional, intent(in) :: missing_permit !< number of allowed points with + !! 
missing value for interpolation (0-3) + character(len=*), optional, intent(out) :: err_msg !< error message + + call horiz_interp(Interp, data_in, data_out, verbose, & + mask_in, mask_out, missing_value, missing_permit, & + err_msg, new_missing_handle=.true. ) + +end subroutine horiz_interp_from_weights_field2d + + +!> perform horizontal interpolation of a 3d field using pre-computed weights +!! source and destination coordinates are 2d +subroutine horiz_interp_from_weights_field3d(Interp, data_in, data_out, verbose, mask_in, mask_out, & + missing_value, missing_permit, err_msg) + + type(horiz_interp_type), intent(in) :: Interp !< type containing interpolation options and weights + real, dimension(:,:,:), intent(in) :: data_in !< input data + real, dimension(:,:,:), intent(out) :: data_out !< output data + integer, optional, intent(in) :: verbose !< verbosity level + real, dimension(:,:,:), optional, intent(in) :: mask_in !< mask for input data + real, dimension(:,:,:), optional, intent(out) :: mask_out !< mask for output data + real, optional, intent(in) :: missing_value !< A value indicating missing data + integer, optional, intent(in) :: missing_permit !< number of allowed points with + !! missing value for interpolation (0-3) + character(len=*), optional, intent(out) :: err_msg !< error message + + call horiz_interp(Interp, data_in, data_out, verbose, mask_in, mask_out, & + missing_value, missing_permit, err_msg) + +end subroutine horiz_interp_from_weights_field3d + + +!> build horizontal interpolation weights from source grid defined by 2d lon/lat to destination grid +!! defined by 2d lon/lat +subroutine build_horiz_interp_weights_2d_to_2d(Interp, lon_in, lat_in, lon_out, lat_out, & + verbose, interp_method, num_nbrs, max_dist, & + src_modulo, mask_in, mask_out, & + is_latlon_in, is_latlon_out) + + type(horiz_interp_type), intent(inout) :: Interp !< type containing interpolation options and weights + real, dimension(:,:), intent(in) :: lon_in !< input longitude 2d + real, dimension(:,:), intent(in) :: lat_in !< input latitude 2d + real, dimension(:,:), intent(in) :: lon_out !< output longitude 2d + real, dimension(:,:), intent(in) :: lat_out !< output latitude 2d + integer, optional, intent(in) :: verbose !< verbosity level + character(len=*), optional, intent(in) :: interp_method !< interpolation method + integer, optional, intent(in) :: num_nbrs !< number of nearest neighbors + real, optional, intent(in) :: max_dist !< maximum region of influence + logical, optional, intent(in) :: src_modulo !< periodicity of E-W boundary + real, dimension(:,:), optional, intent(in) :: mask_in !< mask for input data + real, dimension(:,:), optional, intent(inout) :: mask_out !< mask for output data + logical, optional, intent(in) :: is_latlon_in !< input grid is regular lat/lon grid + logical, optional, intent(in) :: is_latlon_out !< output grid is regular lat/lon grid + + call horiz_interp_new(Interp, lon_in, lat_in, lon_out, lat_out, & + verbose, interp_method, num_nbrs, max_dist, & + src_modulo, mask_in, mask_out, & + is_latlon_in, is_latlon_out) + +end subroutine build_horiz_interp_weights_2d_to_2d + + +!> get size of an external field from field index +function get_extern_field_size(index) + + integer, intent(in) :: index !< field index + integer :: get_extern_field_size(4) !< field size + + get_extern_field_size = get_external_field_size(index) + +end function get_extern_field_size + + +!> get axes of an external field from field index +function get_extern_field_axes(index) + + integer, 
intent(in) :: index !< field index + type(axistype), dimension(4) :: get_extern_field_axes !< field axes + + get_extern_field_axes = get_external_field_axes(index) + +end function get_extern_field_axes + + +!> get missing value of an external field from field index +function get_extern_field_missing(index) + + integer, intent(in) :: index !< field index + real :: get_extern_field_missing !< field missing value + + get_extern_field_missing = get_external_field_missing(index) + +end function get_extern_field_missing + + +!> Get information about the external fields. +subroutine get_external_field_info(field_id, size, axes, missing) + integer, intent(in) :: field_id !< The integer index of the external + !! field returned from a previous + !! call to init_external_field() + integer, dimension(4), optional, intent(inout) :: size !< Dimension sizes for the input data + type(axistype), dimension(4), optional, intent(inout) :: axes !< Axis types for the input data + real, optional, intent(inout) :: missing !< Missing value for the input data + + if (present(size)) then + size(1:4) = get_extern_field_size(field_id) + endif + + if (present(axes)) then + axes(1:4) = get_extern_field_axes(field_id) + endif + + if (present(missing)) then + missing = get_extern_field_missing(field_id) + endif + +end subroutine get_external_field_info + + +!> Read a scalar field based on model time. +subroutine time_interp_extern_0d(field_id, time, data_in, verbose) + integer, intent(in) :: field_id !< The integer index of the external field returned + !! from a previous call to init_external_field() + type(time_type), intent(in) :: time !< The target time for the data + real, intent(inout) :: data_in !< The interpolated value + logical, optional, intent(in) :: verbose !< If true, write verbose output for debugging + + call time_interp_external(field_id, time, data_in, verbose=verbose) +end subroutine time_interp_extern_0d + + +!> Read a 2d field from an external based on model time, potentially including horizontal +!! interpolation and rotation of the data +subroutine time_interp_extern_2d(field_id, time, data_in, interp, verbose, horz_interp, mask_out) + integer, intent(in) :: field_id !< The integer index of the external field returned + !! from a previous call to init_external_field() + type(time_type), intent(in) :: time !< The target time for the data + real, dimension(:,:), intent(inout) :: data_in !< The array in which to store the interpolated values + integer, optional, intent(in) :: interp !< A flag indicating the temporal interpolation method + logical, optional, intent(in) :: verbose !< If true, write verbose output for debugging + type(horiz_interp_type), & + optional, intent(in) :: horz_interp !< A structure to control horizontal interpolation + logical, dimension(:,:), & + optional, intent(out) :: mask_out !< An array that is true where there is valid data + + call time_interp_external(field_id, time, data_in, interp=interp, verbose=verbose, & + horz_interp=horz_interp, mask_out=mask_out) +end subroutine time_interp_extern_2d + + +!> Read a 3d field based on model time, and rotate to the model grid +subroutine time_interp_extern_3d(field_id, time, data_in, interp, verbose, horz_interp, mask_out) + integer, intent(in) :: field_id !< The integer index of the external field returned + !! 
from a previous call to init_external_field() + type(time_type), intent(in) :: time !< The target time for the data + real, dimension(:,:,:), intent(inout) :: data_in !< The array in which to store the interpolated values + integer, optional, intent(in) :: interp !< A flag indicating the temporal interpolation method + logical, optional, intent(in) :: verbose !< If true, write verbose output for debugging + type(horiz_interp_type), & + optional, intent(in) :: horz_interp !< A structure to control horizontal interpolation + logical, dimension(:,:,:), & + optional, intent(out) :: mask_out !< An array that is true where there is valid data + + call time_interp_external(field_id, time, data_in, interp=interp, verbose=verbose, & + horz_interp=horz_interp, mask_out=mask_out) +end subroutine time_interp_extern_3d + + +!> initialize an external field +integer function init_extern_field(file, fieldname, MOM_domain, domain, verbose, & + threading, ierr, ignore_axis_atts ) + + character(len=*), intent(in) :: file !< The name of the file to read + character(len=*), intent(in) :: fieldname !< The name of the field in the file + integer, optional, intent(in) :: threading !< A flag specifying whether the root PE reads + !! the data and broadcasts it (SINGLE_FILE) or all + !! processors read (MULTIPLE, the default). + logical, optional, intent(in) :: verbose !< If true, write verbose output for debugging + type(domain2d), optional, intent(in) :: domain !< A domain2d type that describes the decomposition + type(MOM_domain_type), & + optional, intent(in) :: MOM_Domain !< A MOM_Domain that describes the decomposition + integer, optional, intent(out) :: ierr !< Returns a non-zero error code in case of failure + logical, optional, intent(in) :: ignore_axis_atts !< If present and true, do not issue a + !! fatal error if the axis Cartesian attribute is + !! not set to a recognized value. + + if (present(MOM_Domain)) then + init_extern_field = init_external_field(file, fieldname, domain=MOM_domain%mpp_domain, & + verbose=verbose, threading=threading, ierr=ierr, ignore_axis_atts=ignore_axis_atts) + else + init_extern_field = init_external_field(file, fieldname, domain=domain, & + verbose=verbose, threading=threading, ierr=ierr, ignore_axis_atts=ignore_axis_atts) + endif + +end function init_extern_field + +end module MOM_interp_infra diff --git a/config_src/infra/FMS1/MOM_io_infra.F90 b/config_src/infra/FMS1/MOM_io_infra.F90 new file mode 100644 index 0000000000..3ea201235a --- /dev/null +++ b/config_src/infra/FMS1/MOM_io_infra.F90 @@ -0,0 +1,801 @@ +!> This module contains a thin interface to mpp and fms I/O code +module MOM_io_infra + +! This file is part of MOM6. See LICENSE.md for the license. + +use MOM_domain_infra, only : MOM_domain_type, rescale_comp_data, AGRID, BGRID_NE, CGRID_NE +use MOM_domain_infra, only : domain2d, domain1d, CENTER, CORNER, NORTH_FACE, EAST_FACE +use MOM_error_infra, only : MOM_error=>MOM_err, NOTE, FATAL, WARNING + +use fms_mod, only : write_version_number, open_namelist_file, check_nml_error +use fms_io_mod, only : file_exist, field_exist, field_size, read_data +use fms_io_mod, only : fms_io_exit, get_filename_appendix +use mpp_io_mod, only : mpp_open, mpp_close, mpp_flush +use mpp_io_mod, only : mpp_write_meta, mpp_write +use mpp_io_mod, only : mpp_get_atts, mpp_attribute_exist +use mpp_io_mod, only : mpp_get_axes, axistype, mpp_get_axis_data +use mpp_io_mod, only : mpp_get_fields, fieldtype +use mpp_io_mod, only : mpp_get_info, mpp_get_times +use mpp_io_mod, only : mpp_io_init +! 
These are encoding constants. +use mpp_io_mod, only : APPEND_FILE=>MPP_APPEND, WRITEONLY_FILE=>MPP_WRONLY +use mpp_io_mod, only : OVERWRITE_FILE=>MPP_OVERWR, READONLY_FILE=>MPP_RDONLY +use mpp_io_mod, only : NETCDF_FILE=>MPP_NETCDF, ASCII_FILE=>MPP_ASCII +use mpp_io_mod, only : MULTIPLE=>MPP_MULTI, SINGLE_FILE=>MPP_SINGLE +use iso_fortran_env, only : int64 + +implicit none ; private + +! These interfaces are actually implemented or have explicit interfaces in this file. +public :: open_file, open_ASCII_file, file_is_open, close_file, flush_file, file_exists +public :: get_file_info, get_file_fields, get_file_times, get_filename_suffix +public :: MOM_read_data, MOM_read_vector, write_metadata, write_field +public :: field_exists, get_field_atts, get_field_size, get_axis_data, read_field_chksum +public :: io_infra_init, io_infra_end, MOM_namelist_file, check_namelist_error, write_version +! These types are inherited from underlying infrastructure code, to act as containers for +! information about fields and axes, respectively, and are opaque to this module. +public :: fieldtype, axistype +! These are encoding constant parameters. +public :: ASCII_FILE, NETCDF_FILE, SINGLE_FILE, MULTIPLE +public :: APPEND_FILE, READONLY_FILE, OVERWRITE_FILE, WRITEONLY_FILE +public :: CENTER, CORNER, NORTH_FACE, EAST_FACE + +!> Indicate whether a file exists, perhaps with domain decomposition +interface file_exists + module procedure FMS_file_exists + module procedure MOM_file_exists +end interface + +!> Open a file (or fileset) for parallel or single-file I/O. +interface open_file + module procedure open_file_type, open_file_unit +end interface open_file + +!> Read a data field from a file +interface MOM_read_data + module procedure MOM_read_data_4d + module procedure MOM_read_data_3d + module procedure MOM_read_data_2d, MOM_read_data_2d_region + module procedure MOM_read_data_1d, MOM_read_data_1d_int + module procedure MOM_read_data_0d, MOM_read_data_0d_int +end interface + +!> Write a registered field to an output file +interface write_field + module procedure write_field_4d + module procedure write_field_3d + module procedure write_field_2d + module procedure write_field_1d + module procedure write_field_0d + module procedure MOM_write_axis +end interface write_field + +!> Read a pair of data fields representing the two components of a vector from a file +interface MOM_read_vector + module procedure MOM_read_vector_3d + module procedure MOM_read_vector_2d +end interface MOM_read_vector + +!> Write metadata about a variable or axis to a file and store it for later reuse +interface write_metadata + module procedure write_metadata_axis, write_metadata_field +end interface write_metadata + +!> Close a file (or fileset). If the file handle does not point to an open file, +!! close_file simply returns without doing anything. +interface close_file + module procedure close_file_type, close_file_unit +end interface close_file + +!> Ensure that the output stream associated with a file handle is fully sent to disk +interface flush_file + module procedure flush_file_type, flush_file_unit +end interface flush_file + +!> Type for holding a handle to an open file and related information +type, public :: file_type ; private + integer :: unit = -1 !< The framework identifier or netCDF unit number of an output file + character(len=:), allocatable :: filename !< The path to this file, if it is open + logical :: open_to_read = .false. !< If true, this file or fileset can be read + logical :: open_to_write = .false. 
!< If true, this file or fileset can be written to +end type file_type + +contains + +!> Reads the checksum value for a field that was recorded in a file, along with a flag indicating +!! whether the file contained a valid checksum for this field. +subroutine read_field_chksum(field, chksum, valid_chksum) + type(fieldtype), intent(in) :: field !< The field whose checksum attribute is to be read. + integer(kind=int64), intent(out) :: chksum !< The checksum for the field. + logical, intent(out) :: valid_chksum !< If true, chksum has been successfully read. + ! Local variables + integer(kind=int64), dimension(3) :: checksum_file + + checksum_file(:) = -1 + valid_chksum = mpp_attribute_exist(field, "checksum") + if (valid_chksum) then + call get_field_atts(field, checksum=checksum_file) + chksum = checksum_file(1) + else + chksum = -1 + endif +end subroutine read_field_chksum + +!> Returns true if the named file or its domain-decomposed variant exists. +logical function MOM_file_exists(filename, MOM_Domain) + character(len=*), intent(in) :: filename !< The name of the file being inquired about + type(MOM_domain_type), intent(in) :: MOM_Domain !< The MOM_Domain that describes the decomposition + +! This function uses the fms_io function file_exist to determine whether +! a named file (or its decomposed variant) exists. + + MOM_file_exists = file_exist(filename, MOM_Domain%mpp_domain) + +end function MOM_file_exists + +!> Returns true if the named file or its domain-decomposed variant exists. +logical function FMS_file_exists(filename, domain, no_domain) + character(len=*), intent(in) :: filename !< The name of the file being inquired about + type(domain2d), optional, intent(in) :: domain !< The mpp domain2d that describes the decomposition + logical, optional, intent(in) :: no_domain !< This file does not use domain decomposition +! This function uses the fms_io function file_exist to determine whether +! a named file (or its decomposed variant) exists. + + FMS_file_exists = file_exist(filename, domain, no_domain) + +end function FMS_file_exists + +!> indicates whether an I/O handle is attached to an open file +logical function file_is_open(IO_handle) + type(file_type), intent(in) :: IO_handle !< Handle to a file to inquire about + + file_is_open = (IO_handle%unit >= 0) +end function file_is_open + +!> closes a file (or fileset). If the file handle does not point to an open file, +!! close_file_type simply returns without doing anything. +subroutine close_file_type(IO_handle) + type(file_type), intent(inout) :: IO_handle !< The I/O handle for the file to be closed + + call mpp_close(IO_handle%unit) + if (allocated(IO_handle%filename)) deallocate(IO_handle%filename) + IO_handle%open_to_read = .false. ; IO_handle%open_to_write = .false. +end subroutine close_file_type + +!> closes a file. If the unit does not point to an open file, +!! close_file_unit simply returns without doing anything. +subroutine close_file_unit(unit) + integer, intent(inout) :: unit !< The I/O unit for the file to be closed + + call mpp_close(unit) +end subroutine close_file_unit + +!> Ensure that the output stream associated with a file handle is fully sent to disk. +subroutine flush_file_type(file) + type(file_type), intent(in) :: file !< The I/O handle for the file to flush + + call mpp_flush(file%unit) +end subroutine flush_file_type + +!> Ensure that the output stream associated with a unit is fully sent to disk. 
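+!! A caller that has just written a diagnostic message could, for instance, follow it with +!!   call flush_file(unit) +!! so that the text reaches disk even if the run later aborts; this is only a usage sketch, with unit +!! being whatever was returned by an earlier open_file call.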
+subroutine flush_file_unit(unit) + integer, intent(in) :: unit !< The I/O unit for the file to flush + + call mpp_flush(unit) +end subroutine flush_file_unit + +!> Initialize the underlying I/O infrastructure +subroutine io_infra_init(maxunits) + integer, optional, intent(in) :: maxunits !< An optional maximum number of file + !! unit numbers that can be used. + call mpp_io_init(maxunit=maxunits) +end subroutine io_infra_init + +!> Gracefully close out and terminate the underlying I/O infrastructure +subroutine io_infra_end() + call fms_io_exit() +end subroutine io_infra_end + +!> Open a single namelist file that is potentially readable by all PEs. +function MOM_namelist_file(file) result(unit) + character(len=*), optional, intent(in) :: file !< The file to open, by default "input.nml". + integer :: unit !< The opened unit number of the namelist file + unit = open_namelist_file(file) +end function MOM_namelist_file + +!> Checks the iostat argument that is returned after reading a namelist variable and writes a +!! message if there is an error. +subroutine check_namelist_error(IOstat, nml_name) + integer, intent(in) :: IOstat !< An I/O status field from a namelist read call + character(len=*), intent(in) :: nml_name !< The name of the namelist + integer :: ierr + ierr = check_nml_error(IOstat, nml_name) +end subroutine check_namelist_error + +!> Write a file version number to the log file or other output file +subroutine write_version(version, tag, unit) + character(len=*), intent(in) :: version !< A string that contains the routine name and version + character(len=*), optional, intent(in) :: tag !< A tag name to add to the message + integer, optional, intent(in) :: unit !< An alternate unit number for output + + call write_version_number(version, tag, unit) +end subroutine write_version + +!> open_file opens a file for parallel or single-file I/O. +subroutine open_file_unit(unit, filename, action, form, threading, fileset, nohdrs, domain, MOM_domain) + integer, intent(out) :: unit !< The I/O unit for the opened file + character(len=*), intent(in) :: filename !< The name of the file being opened + integer, optional, intent(in) :: action !< A flag indicating whether the file can be read + !! or written to and how to handle existing files. + integer, optional, intent(in) :: form !< A flag indicating the format of a new file. The + !! default is ASCII_FILE, but NETCDF_FILE is also common. + integer, optional, intent(in) :: threading !< A flag indicating whether one (SINGLE_FILE) + !! or multiple PEs (MULTIPLE) participate in I/O. + !! With the default, the root PE does I/O. + integer, optional, intent(in) :: fileset !< A flag indicating whether multiple PEs doing I/O due + !! to threading=MULTIPLE write to the same file (SINGLE_FILE) + !! or to one file per PE (MULTIPLE, the default). + logical, optional, intent(in) :: nohdrs !< If nohdrs is .TRUE., headers are not written to + !! ASCII files. The default is .false. 
+ type(domain2d), optional, intent(in) :: domain !< A domain2d type that describes the decomposition + type(MOM_domain_type), optional, intent(in) :: MOM_Domain !< A MOM_Domain that describes the decomposition + + if (present(MOM_Domain)) then + call mpp_open(unit, filename, action=action, form=form, threading=threading, fileset=fileset, & + nohdrs=nohdrs, domain=MOM_Domain%mpp_domain) + else + call mpp_open(unit, filename, action=action, form=form, threading=threading, fileset=fileset, & + nohdrs=nohdrs, domain=domain) + endif +end subroutine open_file_unit + +!> open_file opens a file for parallel or single-file I/O. +subroutine open_file_type(IO_handle, filename, action, MOM_domain, threading, fileset) + type(file_type), intent(inout) :: IO_handle !< The handle for the opened file + character(len=*), intent(in) :: filename !< The path name of the file being opened + integer, optional, intent(in) :: action !< A flag indicating whether the file can be read + !! or written to and how to handle existing files. + !! The default is WRITE_ONLY. + type(MOM_domain_type), & + optional, intent(in) :: MOM_Domain !< A MOM_Domain that describes the decomposition + integer, optional, intent(in) :: threading !< A flag indicating whether one (SINGLE_FILE) + !! or multiple PEs (MULTIPLE) participate in I/O. + !! With the default, the root PE does I/O. + integer, optional, intent(in) :: fileset !< A flag indicating whether multiple PEs doing I/O due + !! to threading=MULTIPLE write to the same file (SINGLE_FILE) + !! or to one file per PE (MULTIPLE, the default). + + if (present(MOM_Domain)) then + call mpp_open(IO_handle%unit, filename, action=action, form=NETCDF_FILE, threading=threading, & + fileset=fileset, domain=MOM_Domain%mpp_domain) + else + call mpp_open(IO_handle%unit, filename, action=action, form=NETCDF_FILE, threading=threading, & + fileset=fileset) + endif + IO_handle%filename = trim(filename) + if (present(action)) then + if (action == READONLY_FILE) then + IO_handle%open_to_read = .true. ; IO_handle%open_to_write = .false. + else + IO_handle%open_to_read = .false. ; IO_handle%open_to_write = .true. + endif + else + IO_handle%open_to_read = .false. ; IO_handle%open_to_write = .true. + endif + +end subroutine open_file_type + +!> open_file opens an ascii file for parallel or single-file I/O using Fortran read and write calls. +subroutine open_ASCII_file(unit, file, action, threading, fileset) + integer, intent(out) :: unit !< The I/O unit for the opened file + character(len=*), intent(in) :: file !< The name of the file being opened + integer, optional, intent(in) :: action !< A flag indicating whether the file can be read + !! or written to and how to handle existing files. + integer, optional, intent(in) :: threading !< A flag indicating whether one (SINGLE_FILE) + !! or multiple PEs (MULTIPLE) participate in I/O. + !! With the default, the root PE does I/O. + integer, optional, intent(in) :: fileset !< A flag indicating whether multiple PEs doing I/O due + !! to threading=MULTIPLE write to the same file (SINGLE_FILE) + !! or to one file per PE (MULTIPLE, the default). + + call mpp_open(unit, file, action=action, form=ASCII_FILE, threading=threading, fileset=fileset, & + nohdrs=.true.) + +end subroutine open_ASCII_file + + +!> Provide a string to append to filenames, to differentiate ensemble members, for example. 
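+!! A sketch of one possible use (the variable names and the "." separator are hypothetical): +!!   call get_filename_suffix(suffix) +!!   if (len_trim(suffix) > 0) filename = trim(filename)//"."//trim(suffix)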
+subroutine get_filename_suffix(suffix) + character(len=*), intent(out) :: suffix !< A string to append to filenames + + call get_filename_appendix(suffix) +end subroutine get_filename_suffix + + +!> Get information about the number of dimensions, variables, global attributes and time levels +!! in the file associated with an open file unit +subroutine get_file_info(IO_handle, ndim, nvar, natt, ntime) + type(file_type), intent(in) :: IO_handle !< Handle for a file that is open for I/O + integer, optional, intent(out) :: ndim !< The number of dimensions in the file + integer, optional, intent(out) :: nvar !< The number of variables in the file + integer, optional, intent(out) :: natt !< The number of global attributes in the file + integer, optional, intent(out) :: ntime !< The number of time levels in the file + + ! Local variables + integer :: ndims, nvars, natts, ntimes + + call mpp_get_info(IO_handle%unit, ndims, nvars, natts, ntimes ) + + if (present(ndim)) ndim = ndims + if (present(nvar)) nvar = nvars + if (present(natt)) natt = natts + if (present(ntime)) ntime = ntimes + +end subroutine get_file_info + + +!> Get the times of records from a file + !### Modify this to also convert to time_type, using information about the dimensions? +subroutine get_file_times(IO_handle, time_values, ntime) + type(file_type), intent(in) :: IO_handle !< Handle for a file that is open for I/O + real, allocatable, dimension(:), intent(inout) :: time_values !< The real times for the records in file. + integer, optional, intent(out) :: ntime !< The number of time levels in the file + + integer :: ntimes + + if (allocated(time_values)) deallocate(time_values) + call get_file_info(IO_handle, ntime=ntimes) + if (present(ntime)) ntime = ntimes + if (ntimes > 0) then + allocate(time_values(ntimes)) + call mpp_get_times(IO_handle%unit, time_values) + endif +end subroutine get_file_times + +!> Set up the field information (e.g., names and metadata) for all of the variables in a file. The +!! argument fields must be allocated with a size that matches the number of variables in a file. +subroutine get_file_fields(IO_handle, fields) + type(file_type), intent(in) :: IO_handle !< Handle for a file that is open for I/O + type(fieldtype), dimension(:), intent(inout) :: fields !< Field-type descriptions of all of + !! the variables in a file. + call mpp_get_fields(IO_handle%unit, fields) +end subroutine get_file_fields + +!> Extract information from a field type, as stored or as found in a file +subroutine get_field_atts(field, name, units, longname, checksum) + type(fieldtype), intent(in) :: field !< The field to extract information from + character(len=*), optional, intent(out) :: name !< The variable name + character(len=*), optional, intent(out) :: units !< The units of the variable + character(len=*), optional, intent(out) :: longname !< The long name of the variable + integer(kind=int64), dimension(:), & + optional, intent(out) :: checksum !< The checksums of the variable in a file + call mpp_get_atts(field, name=name, units=units, longname=longname, checksum=checksum) +end subroutine get_field_atts + +!> Field_exists returns true if the field indicated by field_name is present in the +!! file file_name. If file_name does not exist, it returns false. 
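+!! For example, a hypothetical initialization step could guard a read with +!!   if (field_exists("INPUT/MOM_IC.nc", "salt", MOM_domain=G%Domain)) & +!!     call MOM_read_data("INPUT/MOM_IC.nc", "salt", salt, G%Domain) +!! where the file, field and variable names are purely illustrative.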
+function field_exists(filename, field_name, domain, no_domain, MOM_domain) + character(len=*), intent(in) :: filename !< The name of the file being inquired about + character(len=*), intent(in) :: field_name !< The name of the field being sought + type(domain2d), target, optional, intent(in) :: domain !< A domain2d type that describes the decomposition + logical, optional, intent(in) :: no_domain !< This file does not use domain decomposition + type(MOM_domain_type), optional, intent(in) :: MOM_Domain !< A MOM_Domain that describes the decomposition + logical :: field_exists !< True if filename exists and field_name is in filename + + if (present(MOM_domain)) then + field_exists = field_exist(filename, field_name, domain=MOM_domain%mpp_domain, no_domain=no_domain) + else + field_exists = field_exist(filename, field_name, domain=domain, no_domain=no_domain) + endif + +end function field_exists + +!> Given filename and fieldname, this subroutine returns the size of the field in the file +subroutine get_field_size(filename, fieldname, sizes, field_found, no_domain) + character(len=*), intent(in) :: filename !< The name of the file to read + character(len=*), intent(in) :: fieldname !< The name of the variable whose sizes are returned + integer, dimension(:), intent(inout) :: sizes !< The sizes of the variable in each dimension + logical, optional, intent(out) :: field_found !< This indicates whether the field was found in + !! the input file. Without this argument, there + !! is a fatal error if the field is not found. + logical, optional, intent(in) :: no_domain !< If present and true, do not check for file + !! names with an appended tile number + + call field_size(filename, fieldname, sizes, field_found=field_found, no_domain=no_domain) + +end subroutine get_field_size + +!> Extracts and returns the axis data stored in an axistype. +subroutine get_axis_data( axis, dat ) + type(axistype), intent(in) :: axis !< An axis type + real, dimension(:), intent(out) :: dat !< The data in the axis variable + + call mpp_get_axis_data( axis, dat ) +end subroutine get_axis_data + +!> This routine uses the fms_io subroutine read_data to read a scalar named +!! "fieldname" from a single or domain-decomposed file "filename". +subroutine MOM_read_data_0d(filename, fieldname, data, timelevel, scale, MOM_Domain) + character(len=*), intent(in) :: filename !< The name of the file to read + character(len=*), intent(in) :: fieldname !< The variable name of the data in the file + real, intent(inout) :: data !< The scalar into which the data should be read + integer, optional, intent(in) :: timelevel !< The time level in the file to read + real, optional, intent(in) :: scale !< A scaling factor that the field is multiplied + !! by before it is returned. + type(MOM_domain_type), & + optional, intent(in) :: MOM_Domain !< The MOM_Domain that describes the decomposition + + if (present(MOM_Domain)) then + call read_data(filename, fieldname, data, MOM_Domain%mpp_domain, timelevel=timelevel) + else + call read_data(filename, fieldname, data, timelevel=timelevel, no_domain=.true.) + endif + + if (present(scale)) then ; if (scale /= 1.0) then + data = scale*data + endif ; endif + +end subroutine MOM_read_data_0d + +!> This routine uses the fms_io subroutine read_data to read a 1-D data field named +!! "fieldname" from a single or domain-decomposed file "filename". 
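+!! For example, a 1-d target-density coordinate could be read and rescaled via +!!   call MOM_read_data(coord_file, "Layer", Rlay, scale=0.001) +!! where the file name, variable name and scale factor are purely illustrative.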
+subroutine MOM_read_data_1d(filename, fieldname, data, timelevel, scale, MOM_Domain) + character(len=*), intent(in) :: filename !< The name of the file to read + character(len=*), intent(in) :: fieldname !< The variable name of the data in the file + real, dimension(:), intent(inout) :: data !< The 1-dimensional array into which the data should be read + integer, optional, intent(in) :: timelevel !< The time level in the file to read + real, optional, intent(in) :: scale !< A scaling factor that the field is multiplied + !! by before it is returned. + type(MOM_domain_type), & + optional, intent(in) :: MOM_Domain !< The MOM_Domain that describes the decomposition + + if (present(MOM_Domain)) then + call read_data(filename, fieldname, data, MOM_Domain%mpp_domain, timelevel=timelevel) + else + call read_data(filename, fieldname, data, timelevel=timelevel, no_domain=.true.) + endif + + if (present(scale)) then ; if (scale /= 1.0) then + data(:) = scale*data(:) + endif ; endif + +end subroutine MOM_read_data_1d + +!> This routine uses the fms_io subroutine read_data to read a distributed +!! 2-D data field named "fieldname" from file "filename". Valid values for +!! "position" include CORNER, CENTER, EAST_FACE and NORTH_FACE. +subroutine MOM_read_data_2d(filename, fieldname, data, MOM_Domain, & + timelevel, position, scale) + character(len=*), intent(in) :: filename !< The name of the file to read + character(len=*), intent(in) :: fieldname !< The variable name of the data in the file + real, dimension(:,:), intent(inout) :: data !< The 2-dimensional array into which the data + !! should be read + type(MOM_domain_type), intent(in) :: MOM_Domain !< The MOM_Domain that describes the decomposition + integer, optional, intent(in) :: timelevel !< The time level in the file to read + integer, optional, intent(in) :: position !< A flag indicating where this data is located + real, optional, intent(in) :: scale !< A scaling factor that the field is multiplied + !! by before it is returned. + + call read_data(filename, fieldname, data, MOM_Domain%mpp_domain, & + timelevel=timelevel, position=position) + + if (present(scale)) then ; if (scale /= 1.0) then + call rescale_comp_data(MOM_Domain, data, scale) + endif ; endif + +end subroutine MOM_read_data_2d + +!> This routine uses the fms_io subroutine read_data to read a region from a distributed or +!! global 2-D data field named "fieldname" from file "filename". +subroutine MOM_read_data_2d_region(filename, fieldname, data, start, nread, MOM_domain, & + no_domain, scale) + character(len=*), intent(in) :: filename !< The name of the file to read + character(len=*), intent(in) :: fieldname !< The variable name of the data in the file + real, dimension(:,:), intent(inout) :: data !< The 2-dimensional array into which the data + !! should be read + integer, dimension(:), intent(in) :: start !< The starting index to read in each of 4 + !! dimensions. For this 2-d read, the 3rd + !! and 4th values are always 1. + integer, dimension(:), intent(in) :: nread !< The number of points to read in each of 4 + !! dimensions. For this 2-d read, the 3rd + !! and 4th values are always 1. + type(MOM_domain_type), & + optional, intent(in) :: MOM_Domain !< The MOM_Domain that describes the decomposition + logical, optional, intent(in) :: no_domain !< If present and true, this variable does not + !! use domain decomposition. + real, optional, intent(in) :: scale !< A scaling factor that the field is multiplied + !! by before it is returned. 
+ + if (present(MOM_Domain)) then + call read_data(filename, fieldname, data, start, nread, domain=MOM_Domain%mpp_domain, & + no_domain=no_domain) + else + call read_data(filename, fieldname, data, start, nread, no_domain=no_domain) + endif + + if (present(scale)) then ; if (scale /= 1.0) then + if (present(MOM_Domain)) then + call rescale_comp_data(MOM_Domain, data, scale) + else + ! Dangerously rescale the whole array + data(:,:) = scale*data(:,:) + endif + endif ; endif + +end subroutine MOM_read_data_2d_region + +!> This routine uses the fms_io subroutine read_data to read a distributed +!! 3-D data field named "fieldname" from file "filename". Valid values for +!! "position" include CORNER, CENTER, EAST_FACE and NORTH_FACE. +subroutine MOM_read_data_3d(filename, fieldname, data, MOM_Domain, & + timelevel, position, scale) + character(len=*), intent(in) :: filename !< The name of the file to read + character(len=*), intent(in) :: fieldname !< The variable name of the data in the file + real, dimension(:,:,:), intent(inout) :: data !< The 3-dimensional array into which the data + !! should be read + type(MOM_domain_type), intent(in) :: MOM_Domain !< The MOM_Domain that describes the decomposition + integer, optional, intent(in) :: timelevel !< The time level in the file to read + integer, optional, intent(in) :: position !< A flag indicating where this data is located + real, optional, intent(in) :: scale !< A scaling factor that the field is multiplied + !! by before it is returned. + + call read_data(filename, fieldname, data, MOM_Domain%mpp_domain, & + timelevel=timelevel, position=position) + + if (present(scale)) then ; if (scale /= 1.0) then + call rescale_comp_data(MOM_Domain, data, scale) + endif ; endif + +end subroutine MOM_read_data_3d + +!> This routine uses the fms_io subroutine read_data to read a distributed +!! 4-D data field named "fieldname" from file "filename". Valid values for +!! "position" include CORNER, CENTER, EAST_FACE and NORTH_FACE. +subroutine MOM_read_data_4d(filename, fieldname, data, MOM_Domain, & + timelevel, position, scale) + character(len=*), intent(in) :: filename !< The name of the file to read + character(len=*), intent(in) :: fieldname !< The variable name of the data in the file + real, dimension(:,:,:,:), intent(inout) :: data !< The 4-dimensional array into which the data + !! should be read + type(MOM_domain_type), intent(in) :: MOM_Domain !< The MOM_Domain that describes the decomposition + integer, optional, intent(in) :: timelevel !< The time level in the file to read + integer, optional, intent(in) :: position !< A flag indicating where this data is located + real, optional, intent(in) :: scale !< A scaling factor that the field is multiplied + !! by before it is returned. + + call read_data(filename, fieldname, data, MOM_Domain%mpp_domain, & + timelevel=timelevel, position=position) + + if (present(scale)) then ; if (scale /= 1.0) then + call rescale_comp_data(MOM_Domain, data, scale) + endif ; endif + +end subroutine MOM_read_data_4d + +!> This routine uses the fms_io subroutine read_data to read a scalar integer +!! data field named "fieldname" from file "filename". 
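+!! For example, an integer counter saved by a previous run might be recovered with +!!   call MOM_read_data("RESTART/MOM_counters.nc", "ocean_nstep", nstep) +!! where the file, variable and counter names here are hypothetical.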
+subroutine MOM_read_data_0d_int(filename, fieldname, data, timelevel)
+  character(len=*), intent(in) :: filename !< The name of the file to read
+  character(len=*), intent(in) :: fieldname !< The variable name of the data in the file
+  integer, intent(inout) :: data !< The scalar integer into which the data should be read
+  integer, optional, intent(in) :: timelevel !< The time level in the file to read
+
+  call read_data(filename, fieldname, data, timelevel=timelevel, no_domain=.true.)
+
+end subroutine MOM_read_data_0d_int
+
+!> This routine uses the fms_io subroutine read_data to read a 1-D integer
+!! data field named "fieldname" from file "filename".
+subroutine MOM_read_data_1d_int(filename, fieldname, data, timelevel)
+  character(len=*), intent(in) :: filename !< The name of the file to read
+  character(len=*), intent(in) :: fieldname !< The variable name of the data in the file
+  integer, dimension(:), intent(inout) :: data !< The 1-dimensional array into which the data
+                                               !! should be read
+  integer, optional, intent(in) :: timelevel !< The time level in the file to read
+
+  call read_data(filename, fieldname, data, timelevel=timelevel, no_domain=.true.)
+
+end subroutine MOM_read_data_1d_int
+
+
+!> This routine uses the fms_io subroutine read_data to read a pair of distributed
+!! 2-D data fields with names given by "[uv]_fieldname" from file "filename". Valid values for
+!! "stagger" include CGRID_NE, BGRID_NE, and AGRID.
+subroutine MOM_read_vector_2d(filename, u_fieldname, v_fieldname, u_data, v_data, MOM_Domain, &
+                              timelevel, stagger, scalar_pair, scale)
+  character(len=*), intent(in) :: filename !< The name of the file to read
+  character(len=*), intent(in) :: u_fieldname !< The variable name of the u data in the file
+  character(len=*), intent(in) :: v_fieldname !< The variable name of the v data in the file
+  real, dimension(:,:), intent(inout) :: u_data !< The 2 dimensional array into which the
+                                                !! u-component of the data should be read
+  real, dimension(:,:), intent(inout) :: v_data !< The 2 dimensional array into which the
+                                                !! v-component of the data should be read
+  type(MOM_domain_type), intent(in) :: MOM_Domain !< The MOM_Domain that describes the decomposition
+  integer, optional, intent(in) :: timelevel !< The time level in the file to read
+  integer, optional, intent(in) :: stagger !< A flag indicating where this vector is discretized
+  logical, optional, intent(in) :: scalar_pair !< If true, a pair of scalars are to be read
+  real, optional, intent(in) :: scale !< A scaling factor that the fields are multiplied
+                                      !! by before they are returned.
+  integer :: u_pos, v_pos
+
+  u_pos = EAST_FACE ; v_pos = NORTH_FACE
+  if (present(stagger)) then
+    if (stagger == CGRID_NE) then ; u_pos = EAST_FACE ; v_pos = NORTH_FACE
+    elseif (stagger == BGRID_NE) then ; u_pos = CORNER ; v_pos = CORNER
+    elseif (stagger == AGRID) then ; u_pos = CENTER ; v_pos = CENTER ; endif
+  endif
+
+  call read_data(filename, u_fieldname, u_data, MOM_Domain%mpp_domain, &
+                 timelevel=timelevel, position=u_pos)
+  call read_data(filename, v_fieldname, v_data, MOM_Domain%mpp_domain, &
+                 timelevel=timelevel, position=v_pos)
+
+  if (present(scale)) then ; if (scale /= 1.0) then
+    call rescale_comp_data(MOM_Domain, u_data, scale)
+    call rescale_comp_data(MOM_Domain, v_data, scale)
+  endif ; endif
+
+end subroutine MOM_read_vector_2d
+
+!> This routine uses the fms_io subroutine read_data to read a pair of distributed
+!! 3-D data fields with names given by "[uv]_fieldname" from file "filename". Valid values for
+!! "stagger" include CGRID_NE, BGRID_NE, and AGRID.
+subroutine MOM_read_vector_3d(filename, u_fieldname, v_fieldname, u_data, v_data, MOM_Domain, &
+                              timelevel, stagger, scalar_pair, scale)
+  character(len=*), intent(in) :: filename !< The name of the file to read
+  character(len=*), intent(in) :: u_fieldname !< The variable name of the u data in the file
+  character(len=*), intent(in) :: v_fieldname !< The variable name of the v data in the file
+  real, dimension(:,:,:), intent(inout) :: u_data !< The 3 dimensional array into which the
+                                                  !! u-component of the data should be read
+  real, dimension(:,:,:), intent(inout) :: v_data !< The 3 dimensional array into which the
+                                                  !! v-component of the data should be read
+  type(MOM_domain_type), intent(in) :: MOM_Domain !< The MOM_Domain that describes the decomposition
+  integer, optional, intent(in) :: timelevel !< The time level in the file to read
+  integer, optional, intent(in) :: stagger !< A flag indicating where this vector is discretized
+  logical, optional, intent(in) :: scalar_pair !< If true, a pair of scalars are to be read
+  real, optional, intent(in) :: scale !< A scaling factor that the fields are multiplied
+                                      !! by before they are returned.
+
+  integer :: u_pos, v_pos
+
+  u_pos = EAST_FACE ; v_pos = NORTH_FACE
+  if (present(stagger)) then
+    if (stagger == CGRID_NE) then ; u_pos = EAST_FACE ; v_pos = NORTH_FACE
+    elseif (stagger == BGRID_NE) then ; u_pos = CORNER ; v_pos = CORNER
+    elseif (stagger == AGRID) then ; u_pos = CENTER ; v_pos = CENTER ; endif
+  endif
+
+  call read_data(filename, u_fieldname, u_data, MOM_Domain%mpp_domain, &
+                 timelevel=timelevel, position=u_pos)
+  call read_data(filename, v_fieldname, v_data, MOM_Domain%mpp_domain, &
+                 timelevel=timelevel, position=v_pos)
+
+  if (present(scale)) then ; if (scale /= 1.0) then
+    call rescale_comp_data(MOM_Domain, u_data, scale)
+    call rescale_comp_data(MOM_Domain, v_data, scale)
+  endif ; endif
+
+end subroutine MOM_read_vector_3d
+
+
+!> Write a 4d field to an output file.
+subroutine write_field_4d(IO_handle, field_md, MOM_domain, field, tstamp, tile_count, fill_value)
+  type(file_type), intent(in) :: IO_handle !< Handle for a file that is open for writing
+  type(fieldtype), intent(in) :: field_md !< Field type with metadata
+  type(MOM_domain_type), intent(in) :: MOM_domain !< The MOM_Domain that describes the decomposition
+  real, dimension(:,:,:,:), intent(inout) :: field !< Field to write
+  real, optional, intent(in) :: tstamp !< Model timestamp
+  integer, optional, intent(in) :: tile_count !< PEs per tile (default: 1)
+  real, optional, intent(in) :: fill_value !< Missing data fill value
+
+  call mpp_write(IO_handle%unit, field_md, MOM_domain%mpp_domain, field, tstamp=tstamp, &
+                 tile_count=tile_count, default_data=fill_value)
+end subroutine write_field_4d
+
+!> Write a 3d field to an output file.
+subroutine write_field_3d(IO_handle, field_md, MOM_domain, field, tstamp, tile_count, fill_value) + type(file_type), intent(in) :: IO_handle !< Handle for a file that is open for writing + type(fieldtype), intent(in) :: field_md !< Field type with metadata + type(MOM_domain_type), intent(in) :: MOM_domain !< The MOM_Domain that describes the decomposition + real, dimension(:,:,:), intent(inout) :: field !< Field to write + real, optional, intent(in) :: tstamp !< Model timestamp + integer, optional, intent(in) :: tile_count !< PEs per tile (default: 1) + real, optional, intent(in) :: fill_value !< Missing data fill value + + call mpp_write(IO_handle%unit, field_md, MOM_domain%mpp_domain, field, tstamp=tstamp, & + tile_count=tile_count, default_data=fill_value) +end subroutine write_field_3d + +!> Write a 2d field to an output file. +subroutine write_field_2d(IO_handle, field_md, MOM_domain, field, tstamp, tile_count, fill_value) + type(file_type), intent(in) :: IO_handle !< Handle for a file that is open for writing + type(fieldtype), intent(in) :: field_md !< Field type with metadata + type(MOM_domain_type), intent(in) :: MOM_domain !< The MOM_Domain that describes the decomposition + real, dimension(:,:), intent(inout) :: field !< Field to write + real, optional, intent(in) :: tstamp !< Model timestamp + integer, optional, intent(in) :: tile_count !< PEs per tile (default: 1) + real, optional, intent(in) :: fill_value !< Missing data fill value + + call mpp_write(IO_handle%unit, field_md, MOM_domain%mpp_domain, field, tstamp=tstamp, & + tile_count=tile_count, default_data=fill_value) +end subroutine write_field_2d + +!> Write a 1d field to an output file. +subroutine write_field_1d(IO_handle, field_md, field, tstamp) + type(file_type), intent(in) :: IO_handle !< Handle for a file that is open for writing + type(fieldtype), intent(in) :: field_md !< Field type with metadata + real, dimension(:), intent(in) :: field !< Field to write + real, optional, intent(in) :: tstamp !< Model timestamp + + call mpp_write(IO_handle%unit, field_md, field, tstamp=tstamp) +end subroutine write_field_1d + +!> Write a 0d field to an output file. +subroutine write_field_0d(IO_handle, field_md, field, tstamp) + type(file_type), intent(in) :: IO_handle !< Handle for a file that is open for writing + type(fieldtype), intent(in) :: field_md !< Field type with metadata + real, intent(in) :: field !< Field to write + real, optional, intent(in) :: tstamp !< Model timestamp + + call mpp_write(IO_handle%unit, field_md, field, tstamp=tstamp) +end subroutine write_field_0d + +!> Write the data for an axis +subroutine MOM_write_axis(IO_handle, axis) + type(file_type), intent(in) :: IO_handle !< Handle for a file that is open for writing + type(axistype), intent(in) :: axis !< An axis type variable with information to write + + call mpp_write(IO_handle%unit, axis) + +end subroutine MOM_write_axis + +!> Store information about an axis in a previously defined axistype and write this +!! information to the file indicated by unit. +subroutine write_metadata_axis(IO_handle, axis, name, units, longname, cartesian, sense, domain, data, calendar) + type(file_type), intent(in) :: IO_handle !< Handle for a file that is open for writing + type(axistype), intent(inout) :: axis !< The axistype where this information is stored. 
+  character(len=*), intent(in) :: name !< The name in the file of this axis
+  character(len=*), intent(in) :: units !< The units of this axis
+  character(len=*), intent(in) :: longname !< The long description of this axis
+  character(len=*), optional, intent(in) :: cartesian !< A variable indicating which direction
+                                                      !! this axis corresponds with. Valid values
+                                                      !! include 'X', 'Y', 'Z', 'T', and 'N' for none.
+  integer, optional, intent(in) :: sense !< This is 1 for axes whose values increase upward, or
+                                         !! -1 if they increase downward.
+  type(domain1D), optional, intent(in) :: domain !< The domain decomposition for this axis
+  real, dimension(:), optional, intent(in) :: data !< The coordinate values of the points on this axis
+  character(len=*), optional, intent(in) :: calendar !< The name of the calendar used with a time axis
+
+  call mpp_write_meta(IO_handle%unit, axis, name, units, longname, cartesian=cartesian, sense=sense, &
+                      domain=domain, data=data, calendar=calendar)
+end subroutine write_metadata_axis
+
+!> Store information about an output variable in a previously defined fieldtype and write this
+!! information to the file indicated by unit.
+subroutine write_metadata_field(IO_handle, field, axes, name, units, longname, &
+                                min, max, fill, scale, add, pack, standard_name, checksum)
+  type(file_type), intent(in) :: IO_handle !< Handle for a file that is open for writing
+  type(fieldtype), intent(inout) :: field !< The fieldtype where this information is stored
+  type(axistype), dimension(:), intent(in) :: axes !< Handles for the axes used for this variable
+  character(len=*), intent(in) :: name !< The name in the file of this variable
+  character(len=*), intent(in) :: units !< The units of this variable
+  character(len=*), intent(in) :: longname !< The long description of this variable
+  real, optional, intent(in) :: min !< The minimum valid value for this variable
+  real, optional, intent(in) :: max !< The maximum valid value for this variable
+  real, optional, intent(in) :: fill !< Missing data fill value
+  real, optional, intent(in) :: scale !< A multiplicative factor by which to scale
+                                      !! the variable before output
+  real, optional, intent(in) :: add !< An offset to add to the variable before output
+  integer, optional, intent(in) :: pack !< A precision reduction factor with which the
+                                        !! variable is packed. The default, 1, has no reduction,
+                                        !! but 2 is not uncommon.
+  character(len=*), optional, intent(in) :: standard_name !< The standard (e.g., CMOR) name for this variable
+  integer(kind=int64), dimension(:), &
+                       optional, intent(in) :: checksum !< Checksum values that can be used to verify reads.
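+  ! All of this metadata is passed straight through to mpp_write_meta in the FMS1 I/O layer.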
+ + + call mpp_write_meta(IO_handle%unit, field, axes, name, units, longname, min=min, max=max, & + fill=fill, scale=scale, add=add, pack=pack, standard_name=standard_name, checksum=checksum) + +end subroutine write_metadata_field + +end module MOM_io_infra diff --git a/src/framework/MOM_time_manager.F90 b/config_src/infra/FMS1/MOM_time_manager.F90 similarity index 62% rename from src/framework/MOM_time_manager.F90 rename to config_src/infra/FMS1/MOM_time_manager.F90 index 229c3ded3a..5f3279b713 100644 --- a/src/framework/MOM_time_manager.F90 +++ b/config_src/infra/FMS1/MOM_time_manager.F90 @@ -14,9 +14,6 @@ module MOM_time_manager use time_manager_mod, only : set_calendar_type, get_calendar_type use time_manager_mod, only : JULIAN, NOLEAP, THIRTY_DAY_MONTHS, GREGORIAN use time_manager_mod, only : NO_CALENDAR -use time_interp_external_mod, only : init_external_field, time_interp_external, time_interp_external_init -use time_interp_external_mod, only : get_external_field_size -use time_interp_external_mod, only : get_external_field_axes, get_external_field_missing implicit none ; private @@ -29,24 +26,18 @@ module MOM_time_manager public :: get_date, set_date, increment_date, month_name, days_in_month public :: JULIAN, NOLEAP, THIRTY_DAY_MONTHS, GREGORIAN, NO_CALENDAR public :: set_calendar_type, get_calendar_type -public :: init_external_field -public :: time_interp_external -public :: time_interp_external_init -public :: get_external_field_size -public :: get_external_field_axes -public :: get_external_field_missing contains -!> This is an alternate implementation of the FMS function real_to_time_type that is accurate over -!! a larger range of input values. With 32 bit signed integers, this version should work over the -!! entire valid range (2^31 days or ~5.8835 million years) of time_types, whereas the standard -!! version in the FMS time_manager stops working for conversions of times greater than 2^31 seconds, -!! or ~68.1 years. -function real_to_time(x, err_msg) - type(time_type) :: real_to_time !< The output time as a time_type - real, intent(in) :: x !< The input time in real seconds. - character(len=*), intent(out), optional :: err_msg !< An optional returned error message. +!> Returns a time_type version of a real time in seconds, using an alternate implementation to the +!! FMS function real_to_time_type that is accurate over a larger range of input values. With 32 bit +!! signed integers, this version should work over the entire valid range (2^31 days or ~5.8835 +!! million years) of time_types, whereas the standard version in the FMS time_manager stops working +!! for conversions of times greater than 2^31 seconds, or ~68.1 years. +type(time_type) function real_to_time(x, err_msg) +! type(time_type) :: real_to_time !< The output time as a time_type + real, intent(in) :: x !< The input time in real seconds. + character(len=*), optional, intent(out) :: err_msg !< An optional returned error message. ! 
Local variables integer :: seconds, days, ticks @@ -60,5 +51,4 @@ function real_to_time(x, err_msg) real_to_time = set_time(seconds=seconds, days=days, ticks=ticks, err_msg=err_msg) end function real_to_time - end module MOM_time_manager diff --git a/config_src/dynamic/MOM_memory.h b/config_src/memory/dynamic_nonsymmetric/MOM_memory.h similarity index 100% rename from config_src/dynamic/MOM_memory.h rename to config_src/memory/dynamic_nonsymmetric/MOM_memory.h diff --git a/config_src/dynamic_symmetric/MOM_memory.h b/config_src/memory/dynamic_symmetric/MOM_memory.h similarity index 100% rename from config_src/dynamic_symmetric/MOM_memory.h rename to config_src/memory/dynamic_symmetric/MOM_memory.h diff --git a/docs/.gitignore b/docs/.gitignore index de2f06d096..e8b6a0513b 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -2,8 +2,17 @@ doxygen doxygen.log APIs +MOM6.tags +details/tutorial + + # Ignore sphinx-build output _build api src xml + + +# Citation output +bib*.aux +citelist.doc* diff --git a/docs/Doxyfile_nortd b/docs/Doxyfile_nortd index 76b66b9dd3..4a81c20bd7 100644 --- a/docs/Doxyfile_nortd +++ b/docs/Doxyfile_nortd @@ -1,4 +1,4 @@ -# Doxyfile 1.8.15 +# Doxyfile 1.8.19 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. @@ -58,7 +58,7 @@ PROJECT_LOGO = # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. -OUTPUT_DIRECTORY = +OUTPUT_DIRECTORY = . # If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- # directories (in 2 levels) under the output directory of each output format and @@ -187,6 +187,16 @@ SHORT_NAMES = NO JAVADOC_AUTOBRIEF = NO +# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line +# such as +# /*************** +# as being the beginning of a Javadoc-style comment "banner". If set to NO, the +# Javadoc-style will behave just like regular comments and it will not be +# interpreted by doxygen. +# The default value is: NO. + +JAVADOC_BANNER = NO + # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus @@ -236,15 +246,45 @@ TAB_SIZE = 2 # "Side Effects:". You can put \n's in the value part of an alias to insert # newlines (in the resulting output). You can put ^^ in the value part of an # alias to insert a newline as if a physical newline was in the original file. - -ALIASES = +# When you need a literal { or } or , in the value part of an alias you have to +# escape them by means of a backslash (\), this can lead to conflicts with the +# commands \{ and \} for these it is advised to use the version @{ and @} or use +# a double escape (\\{ and \\}) + +# Reference: https://git.ligo.org/lscsoft/lalsuite-archive/commit/e6e2dae8a73a73979a64854bbf697b077803697a +#ALIASES = "eqref{1}= Eq. \\eqref{\1}" \ +# "figref{1}= Fig. [\ref \1]" \ +# "tableref{1}= Table [\ref \1]" \ +# "figure{4}= \anchor \1 \image html \1.png \"Fig. [\1]: \4\"" + +# This allows doxygen passthrough of \eqref to html for mathjax +# Single reference within a math block +ALIASES += eqref{1}="\latexonly\ref{\1}\endlatexonly\htmlonly \eqref{\1}\endhtmlonly\xmlonly \\eqref{\1}\endxmlonly" + +# Large math block with multiple references +# TODO: We should be able to overload functions but recursion is happening? 
For now, the +# second command creates a \eqref2 that is passed to sphinx for processing. This breaks +# the html generation for doxygen +# Doxygen 1.8.13 requires extra help via xmlonly +# See python sphinxcontrib-autodoc_doxygen module autodoc_doxygen/xmlutils.py +# \eqref{eq:ale-thickness-equation,ale-equations,thickness} +ALIASES += eqref{3}="\latexonly\ref{\1}\endlatexonly\htmlonly \eqref2{\2,\3}\endhtmlonly\xmlonly \\eqref4{\1}\\eqref2{\2,\3}\endxmlonly" + +# Reference: https://stackoverflow.com/questions/25290453/how-do-i-add-a-footnote-in-doxygen +# TODO: Use this simple js library to create actual footnotes in html +# Reference: https://github.com/jheftmann/footnoted +ALIASES += footnote{1}="\latexonly\footnote\{\1\}\endlatexonly\htmlonly[*]\endhtmlonly\xmlonly[*]\endxmlonly" + +# \image latex does the wrong things to support equations in captions, this recreates +# what it does and just passes through the string uninterpreted. +# The image also needs to be added to LATEX_EXTRA_FILES. +# Default 3rd argument: \includegraphics[width=\textwidth\,height=\textheight/2\,keepaspectratio=true] +ALIASES += imagelatex{3}="\latexonly\begin{DoxyImage}\3{\1}\doxyfigcaption{\2}\end{DoxyImage}\endlatexonly\xmlonly\2\endxmlonly" # This tag can be used to specify a number of word-keyword mappings (TCL only). # A mapping has the form "name=value". For example adding "class=itcl::class" # will allow you to use the command class in the itcl::class meaning. -TCL_SUBST = - # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all @@ -273,17 +313,26 @@ OPTIMIZE_FOR_FORTRAN = YES OPTIMIZE_OUTPUT_VHDL = NO +# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice +# sources only. Doxygen will then generate output that is more tailored for that +# language. For instance, namespaces will be presented as modules, types will be +# separated into more groups, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_SLICE = NO + # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and -# language is one of the parsers supported by doxygen: IDL, Java, Javascript, -# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: -# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: -# Fortran. In the later case the parser tries to guess whether the code is fixed -# or free formatted code, this is the default for Fortran type files), VHDL. For -# instance to make doxygen treat .inc files as Fortran files (default is PHP), -# and .f files as C (default is Fortran), use: inc=Fortran f=C. +# language is one of the parsers supported by doxygen: IDL, Java, JavaScript, +# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL, +# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: +# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser +# tries to guess whether the code is fixed or free formatted code, this is the +# default for Fortran type files). 
For instance to make doxygen treat .inc files +# as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. # @@ -294,7 +343,7 @@ EXTENSION_MAPPING = # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable -# documentation. See http://daringfireball.net/projects/markdown/ for details. +# documentation. See https://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. @@ -306,7 +355,7 @@ MARKDOWN_SUPPORT = YES # to that level are automatically included in the table of contents, even if # they do not have an id attribute. # Note: This feature currently applies only to Markdown headings. -# Minimum value: 0, maximum value: 99, default value: 0. +# Minimum value: 0, maximum value: 99, default value: 5. # This tag requires that the tag MARKDOWN_SUPPORT is set to YES. TOC_INCLUDE_HEADINGS = 0 @@ -422,6 +471,19 @@ TYPEDEF_HIDES_STRUCT = NO LOOKUP_CACHE_SIZE = 0 +# The NUM_PROC_THREADS specifies the number threads doxygen is allowed to use +# during processing. When set to 0 doxygen will based this on the number of +# cores available in the system. You can set it explicitly to a value larger +# than 0 to get more control over the balance between CPU load and processing +# speed. At this moment only the input processing can be done using multiple +# threads. Since this is still an experimental feature the default is set to 1, +# which efficively disables parallel processing. Please report any issues you +# encounter. Generating dot graphs in parallel is controlled by the +# DOT_NUM_THREADS setting. +# Minimum value: 0, maximum value: 32, default value: 1. + +NUM_PROC_THREADS = 1 + #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- @@ -442,6 +504,12 @@ EXTRACT_ALL = NO EXTRACT_PRIVATE = YES +# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual +# methods of a class will be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIV_VIRTUAL = NO + # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. @@ -496,8 +564,8 @@ HIDE_UNDOC_MEMBERS = NO HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend -# (class|struct|union) declarations. If set to NO, these declarations will be -# included in the documentation. +# declarations. If set to NO, these declarations will be included in the +# documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO @@ -520,7 +588,7 @@ INTERNAL_DOCS = YES # names in lower-case letters. If set to YES, upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. +# (including Cygwin) and Mac users are advised to set this option to NO. # The default value is: system dependent. CASE_SENSE_NAMES = YES @@ -712,7 +780,7 @@ LAYOUT_FILE = layout.xml # LATEX_BIB_STYLE. 
To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. -CITE_BIB_FILES = +CITE_BIB_FILES = ocean.bib references.bib zotero.bib #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages @@ -778,7 +846,7 @@ WARN_FORMAT = "$file:$line: $text" # messages should be written. If left blank the output is written to standard # error (stderr). -WARN_LOGFILE = doxygen.log +WARN_LOGFILE = _build/doxygen_warn_nortd_log.txt #--------------------------------------------------------------------------- # Configuration options related to the input files @@ -792,11 +860,10 @@ WARN_LOGFILE = doxygen.log INPUT = ../src \ front_page.md \ - ../config_src/solo_driver \ - ../config_src/dynamic_symmetric - ../config_src/external - ../config_src/coupled_driver - + ../config_src/drivers/solo_driver \ + ../config_src/memory/dynamic_symmetric \ + ../config_src/external \ + ../config_src/drivers/FMS_cap # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses @@ -818,28 +885,58 @@ INPUT_ENCODING = UTF-8 # If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, # *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, # *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, -# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, -# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf and *.qsf. +# *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment), +# *.doc (to be provided as doxygen C comment), *.txt (to be provided as doxygen +# C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, +# *.vhdl, *.ucf, *.qsf and *.ice. FILE_PATTERNS = *.c \ *.cc \ *.cxx \ *.cpp \ *.c++ \ + *.java \ + *.ii \ + *.ixx \ + *.ipp \ + *.i++ \ + *.inl \ + *.idl \ + *.ddl \ + *.odl \ *.h \ *.hh \ *.hxx \ *.hpp \ *.h++ \ + *.cs \ + *.d \ + *.php \ + *.php4 \ + *.php5 \ + *.phtml \ *.inc \ *.m \ *.markdown \ *.md \ *.mm \ *.dox \ + *.doc \ + *.txt \ + *.py \ + *.pyw \ *.f90 \ + *.f95 \ + *.f03 \ + *.f08 \ + *.f18 \ *.f \ *.for \ + *.vhd \ + *.vhdl \ + *.ucf \ + *.qsf \ + *.ice \ *.F90 # The RECURSIVE tag can be used to specify whether or not subdirectories should @@ -1096,7 +1193,7 @@ GENERATE_HTML = YES # The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_OUTPUT = APIs +HTML_OUTPUT = _build/APIs # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). @@ -1211,9 +1308,9 @@ HTML_TIMESTAMP = NO # If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML # documentation will contain a main index with vertical navigation menus that -# are dynamically created via Javascript. If disabled, the navigation index will +# are dynamically created via JavaScript. If disabled, the navigation index will # consists of multiple levels of tabs that are statically embedded in every HTML -# page. Disable this option to support browsers that do not have Javascript, +# page. Disable this option to support browsers that do not have JavaScript, # like the Qt help browser. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. 
@@ -1243,13 +1340,13 @@ HTML_INDEX_NUM_ENTRIES = 900 # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development -# environment (see: https://developer.apple.com/tools/xcode/), introduced with -# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a +# environment (see: https://developer.apple.com/xcode/), introduced with OSX +# 10.5 (Leopard). To create a documentation set, doxygen will generate a # Makefile in the HTML output directory. Running make will produce the docset in # that directory and running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at -# startup. See https://developer.apple.com/tools/creatingdocsetswithdoxygen.html -# for more information. +# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy +# genXcode/_index.html for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. @@ -1288,7 +1385,7 @@ DOCSET_PUBLISHER_NAME = Publisher # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop -# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on # Windows. # # The HTML Help Workshop contains a compiler that can convert all HTML output @@ -1319,7 +1416,7 @@ CHM_FILE = HHC_LOCATION = # The GENERATE_CHI flag controls if a separate .chi index file is generated -# (YES) or that it should be included in the master .chm file (NO). +# (YES) or that it should be included in the main .chm file (NO). # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. @@ -1364,7 +1461,7 @@ QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace -# (see: http://doc.qt.io/qt-4.8/qthelpproject.html#namespace). +# (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1372,7 +1469,8 @@ QHP_NAMESPACE = org.doxygen.Project # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual -# Folders (see: http://doc.qt.io/qt-4.8/qthelpproject.html#virtual-folders). +# Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual- +# folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. @@ -1380,21 +1478,23 @@ QHP_VIRTUAL_FOLDER = doc # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://doc.qt.io/qt-4.8/qthelpproject.html#custom-filters). +# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- +# filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_NAME = # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom -# Filters (see: http://doc.qt.io/qt-4.8/qthelpproject.html#custom-filters). 
+# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- +# filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: -# http://doc.qt.io/qt-4.8/qthelpproject.html#filter-attributes). +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = @@ -1478,6 +1578,17 @@ TREEVIEW_WIDTH = 250 EXT_LINKS_IN_WINDOW = NO +# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg +# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see +# https://inkscape.org) to generate formulas as SVG images instead of PNGs for +# the HTML output. These images will generally look nicer at scaled resolutions. +# Possible values are: png (the default) and svg (looks nicer but requires the +# pdf2svg or inkscape tool). +# The default value is: png. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FORMULA_FORMAT = svg + # Use this tag to change the font size of LaTeX formulas included as images in # the HTML documentation. When you change the font size after a successful # doxygen run you need to manually remove any form_*.png images from the HTML @@ -1498,8 +1609,14 @@ FORMULA_FONTSIZE = 10 FORMULA_TRANSPARENT = YES +# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands +# to create new LaTeX commands to be used in formulas as building blocks. See +# the section "Including formulas" for details. + +FORMULA_MACROFILE = + # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see -# https://www.mathjax.org) which uses client side Javascript for the rendering +# https://www.mathjax.org) which uses client side JavaScript for the rendering # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want to formulas look prettier in the HTML output. When # enabled you may also need to install MathJax separately and configure the path @@ -1527,10 +1644,10 @@ MATHJAX_FORMAT = HTML-CSS # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of # MathJax from https://www.mathjax.org before deployment. -# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/. +# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2. # This tag requires that the tag USE_MATHJAX is set to YES. -MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest +MATHJAX_RELPATH = https://cdn.jsdelivr.net/npm/mathjax@2 # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax # extension names that should be enabled during MathJax rendering. For example @@ -1569,7 +1686,7 @@ MATHJAX_CODEFILE = SEARCHENGINE = YES # When the SERVER_BASED_SEARCH tag is enabled the search engine will be -# implemented using a web server instead of a web client using Javascript. There +# implemented using a web server instead of a web client using JavaScript. There # are two flavors of web server based searching depending on the EXTERNAL_SEARCH # setting. When disabled, doxygen will generate a PHP script for searching and # an index file used by the script. 
When EXTERNAL_SEARCH is enabled the indexing @@ -1664,11 +1781,24 @@ LATEX_CMD_NAME = latex # The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate # index for LaTeX. +# Note: This tag is used in the Makefile / make.bat. +# See also: LATEX_MAKEINDEX_CMD for the part in the generated output file +# (.tex). # The default file is: makeindex. # This tag requires that the tag GENERATE_LATEX is set to YES. MAKEINDEX_CMD_NAME = makeindex +# The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to +# generate index for LaTeX. In case there is no backslash (\) as first character +# it will be automatically added in the LaTeX code. +# Note: This tag is used in the generated output file (.tex). +# See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat. +# The default value is: makeindex. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_MAKEINDEX_CMD = makeindex + # If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX # documents. This may be useful for small projects and may help to save some # trees in general. @@ -1696,7 +1826,7 @@ PAPER_TYPE = a4 # If left blank no extra packages will be included. # This tag requires that the tag GENERATE_LATEX is set to YES. -EXTRA_PACKAGES = +EXTRA_PACKAGES =amsmath amstext # The LATEX_HEADER tag can be used to specify a personal LaTeX header for the # generated LaTeX document. The header should contain everything until the first @@ -1742,7 +1872,15 @@ LATEX_EXTRA_STYLESHEET = # markers available. # This tag requires that the tag GENERATE_LATEX is set to YES. -LATEX_EXTRA_FILES = +# Graphics that have math or equations in the caption need to be listed below. +LATEX_EXTRA_FILES = \ + images/cell_3d.png \ + images/Grid_metrics.png \ + images/h_PPM.png \ + images/Newton_PPM.png \ + images/PG_loop.png \ + images/shao3.png \ + images/shao4.png # If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is # prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will @@ -1753,9 +1891,11 @@ LATEX_EXTRA_FILES = PDF_HYPERLINKS = YES -# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate -# the PDF file directly from the LaTeX files. Set this option to YES, to get a -# higher quality PDF documentation. +# If the USE_PDFLATEX tag is set to YES, doxygen will use the engine as +# specified with LATEX_CMD_NAME to generate the PDF file directly from the LaTeX +# files. Set this option to YES, to get a higher quality PDF documentation. +# +# See also section LATEX_CMD_NAME for selecting the engine. # The default value is: YES. # This tag requires that the tag GENERATE_LATEX is set to YES. @@ -1803,6 +1943,14 @@ LATEX_BIB_STYLE = plain LATEX_TIMESTAMP = NO +# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute) +# path from which the emoji images will be read. If a relative path is entered, +# it will be relative to the LATEX_OUTPUT directory. If left blank the +# LATEX_OUTPUT directory will be used. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_EMOJI_DIRECTORY = + #--------------------------------------------------------------------------- # Configuration options related to the RTF output #--------------------------------------------------------------------------- @@ -1940,6 +2088,13 @@ XML_OUTPUT = xml XML_PROGRAMLISTING = YES +# If the XML_NS_MEMB_FILE_SCOPE tag is set to YES, doxygen will include +# namespace members in file scope as well, matching the HTML output. 
+# The default value is: NO. +# This tag requires that the tag GENERATE_XML is set to YES. + +XML_NS_MEMB_FILE_SCOPE = NO + #--------------------------------------------------------------------------- # Configuration options related to the DOCBOOK output #--------------------------------------------------------------------------- @@ -2057,7 +2212,7 @@ SEARCH_INCLUDES = YES # This tag requires that the tag SEARCH_INCLUDES is set to YES. INCLUDE_PATH = ../src/framework \ - ../config_src/dynamic_symmetric + ../config_src/memory/dynamic_symmetric # You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard # patterns (like *.h and *.hpp) to filter out the header-files in the @@ -2119,7 +2274,7 @@ TAGFILES = # tag file that is based on the input files it reads. See section "Linking to # external documentation" for more information about the usage of tag files. -GENERATE_TAGFILE = +GENERATE_TAGFILE = MOM6.tags # If the ALLEXTERNALS tag is set to YES, all external class will be listed in # the class index. If set to NO, only the inherited external classes will be @@ -2142,12 +2297,6 @@ EXTERNAL_GROUPS = YES EXTERNAL_PAGES = YES -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of 'which perl'). -# The default file (with absolute path) is: /usr/bin/perl. - -PERL_PATH = /usr/bin/perl - #--------------------------------------------------------------------------- # Configuration options related to the dot tool #--------------------------------------------------------------------------- @@ -2161,15 +2310,6 @@ PERL_PATH = /usr/bin/perl CLASS_DIAGRAMS = YES -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see: -# http://www.mcternan.me.uk/mscgen/)) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - # You can include diagrams made with dia in doxygen documentation. Doxygen will # then run dia to produce the diagram and insert it in the documentation. The # DIA_PATH tag allows you to specify the directory where the dia binary resides. diff --git a/docs/Doxyfile_nortd_latex b/docs/Doxyfile_nortd_latex new file mode 100644 index 0000000000..207e645195 --- /dev/null +++ b/docs/Doxyfile_nortd_latex @@ -0,0 +1,2608 @@ +# Doxyfile 1.8.19 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the configuration +# file that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. 
See +# https://www.gnu.org/software/libiconv/ for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "MOM6" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify a logo or an icon that is included +# in the documentation. The maximum height of the logo should not exceed 55 +# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy +# the logo to the output directory. + +PROJECT_LOGO = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = . + +# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. +# The default value is: NO. + +CREATE_SUBDIRS = NO + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, +# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), +# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, +# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), +# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, +# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, +# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, +# Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all generated output in the proper direction. +# Possible values are: None, LTR, RTL and Context. +# The default value is: None. 
+ +OUTPUT_TEXT_DIRECTION = None + +# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. + +ABBREVIATE_BRIEF = + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful is your file systems doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. 
+ +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = NO + +# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line +# such as +# /*************** +# as being the beginning of a Javadoc-style comment "banner". If set to NO, the +# Javadoc-style will behave just like regular comments and it will not be +# interpreted by doxygen. +# The default value is: NO. + +JAVADOC_BANNER = NO + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. +# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new +# page for each member. If set to NO, the documentation of a member will be part +# of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 2 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:\n" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". You can put \n's in the value part of an alias to insert +# newlines (in the resulting output). You can put ^^ in the value part of an +# alias to insert a newline as if a physical newline was in the original file. +# When you need a literal { or } or , in the value part of an alias you have to +# escape them by means of a backslash (\), this can lead to conflicts with the +# commands \{ and \} for these it is advised to use the version @{ and @} or use +# a double escape (\\{ and \\}) + +# Reference: https://git.ligo.org/lscsoft/lalsuite-archive/commit/e6e2dae8a73a73979a64854bbf697b077803697a +#ALIASES = "eqref{1}= Eq. \\eqref{\1}" \ +# "figref{1}= Fig. [\ref \1]" \ +# "tableref{1}= Table [\ref \1]" \ +# "figure{4}= \anchor \1 \image html \1.png \"Fig. 
[\1]: \4\"" + +# This allows doxygen passthrough of \eqref to html for mathjax +# Single reference within a math block +ALIASES += eqref{1}="\latexonly\ref{\1}\endlatexonly\htmlonly \eqref{\1}\endhtmlonly\xmlonly \\eqref{\1}\endxmlonly" + +# Large math block with multiple references +# TODO: We should be able to overload functions but recursion is happening? For now, the +# second command creates a \eqref2 that is passed to sphinx for processing. This breaks +# the html generation for doxygen +# Doxygen 1.8.13 requires extra help via xmlonly +# See python sphinxcontrib-autodoc_doxygen module autodoc_doxygen/xmlutils.py +# \eqref{eq:ale-thickness-equation,ale-equations,thickness} +ALIASES += eqref{3}="\latexonly\ref{\1}\endlatexonly\htmlonly \eqref2{\2,\3}\endhtmlonly\xmlonly \\eqref4{\1}\\eqref2{\2,\3}\endxmlonly" + +# Reference: https://stackoverflow.com/questions/25290453/how-do-i-add-a-footnote-in-doxygen +# TODO: Use this simple js library to create actual footnotes in html +# Reference: https://github.com/jheftmann/footnoted +ALIASES += footnote{1}="\latexonly\footnote\{\1\}\endlatexonly\htmlonly[*]\endhtmlonly\xmlonly[*]\endxmlonly" + +# \image latex does the wrong things to support equations in captions, this recreates +# what it does and just passes through the string uninterpreted. +# The image also needs to be added to LATEX_EXTRA_FILES. +# Default 3rd argument: \includegraphics[width=\textwidth\,height=\textheight/2\,keepaspectratio=true] +ALIASES += imagelatex{3}="\latexonly\begin{DoxyImage}\3{\1}\doxyfigcaption{\2}\end{DoxyImage}\endlatexonly\xmlonly\2\endxmlonly" + +# This tag can be used to specify a number of word-keyword mappings (TCL only). +# A mapping has the form "name=value". For example adding "class=itcl::class" +# will allow you to use the command class in the itcl::class meaning. + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = YES + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. +# The default value is: NO. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice +# sources only. Doxygen will then generate output that is more tailored for that +# language. For instance, namespaces will be presented as modules, types will be +# separated into more groups, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_SLICE = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. 
The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, JavaScript, +# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL, +# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: +# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser +# tries to guess whether the code is fixed or free formatted code, this is the +# default for Fortran type files). For instance to make doxygen treat .inc files +# as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. +# +# Note: For files without extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See https://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up +# to that level are automatically included in the table of contents, even if +# they do not have an id attribute. +# Note: This feature currently applies only to Markdown headings. +# Minimum value: 0, maximum value: 99, default value: 5. +# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. + +TOC_INCLUDE_HEADINGS = 0 + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by putting a % sign in front of the word or +# globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. 
If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = YES + +# If one adds a struct or class to a group and this option is enabled, then also +# any nested class or struct is added to the same group. By default this option +# is disabled and one has to add nested compounds explicitly via \ingroup. +# The default value is: NO. + +GROUP_NESTED_COMPOUNDS = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. + +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. 
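+# For example, with the formula above, LOOKUP_CACHE_SIZE = 2 would correspond to
+# a cache of 2^(16+2) = 262144 symbols.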
+
+LOOKUP_CACHE_SIZE = 0
+
+# The NUM_PROC_THREADS specifies the number of threads doxygen is allowed to use
+# during processing. When set to 0 doxygen will base this on the number of
+# cores available in the system. You can set it explicitly to a value larger
+# than 0 to get more control over the balance between CPU load and processing
+# speed. At this moment only the input processing can be done using multiple
+# threads. Since this is still an experimental feature the default is set to 1,
+# which effectively disables parallel processing. Please report any issues you
+# encounter. Generating dot graphs in parallel is controlled by the
+# DOT_NUM_THREADS setting.
+# Minimum value: 0, maximum value: 32, default value: 1.
+
+NUM_PROC_THREADS = 1
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
+# documentation are documented, even if no documentation was available. Private
+# class members and static file members will be hidden unless the
+# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
+# Note: This will also disable the warnings about undocumented members that are
+# normally produced when WARNINGS is set to YES.
+# The default value is: NO.
+
+EXTRACT_ALL = NO
+
+# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
+# be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PRIVATE = YES
+
+# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual
+# methods of a class will be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PRIV_VIRTUAL = NO
+
+# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
+# scope will be included in the documentation.
+# The default value is: NO.
+
+EXTRACT_PACKAGE = YES
+
+# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
+# included in the documentation.
+# The default value is: NO.
+
+EXTRACT_STATIC = YES
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
+# locally in source files will be included in the documentation. If set to NO,
+# only classes defined in header files are included. Does not have any effect
+# for Java sources.
+# The default value is: YES.
+
+EXTRACT_LOCAL_CLASSES = YES
+
+# This flag is only useful for Objective-C code. If set to YES, local methods,
+# which are defined in the implementation section but not in the interface, are
+# included in the documentation. If set to NO, only methods in the interface are
+# included.
+# The default value is: NO.
+
+EXTRACT_LOCAL_METHODS = YES
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base name of
+# the file that contains the anonymous namespace. By default anonymous
+# namespaces are hidden.
+# The default value is: NO.
+
+EXTRACT_ANON_NSPACES = YES
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+ +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO, these classes will be included in the various overviews. This option +# has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# declarations. If set to NO, these declarations will be included in the +# documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO, these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file +# names in lower-case letters. If set to YES, upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# (including Cygwin) and Mac users are advised to set this option to NO. +# The default value is: system dependent. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with +# their full class and namespace scopes in the documentation. If set to YES, the +# scope will be hidden. +# The default value is: NO. + +HIDE_SCOPE_NAMES = NO + +# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will +# append additional text to a page's title, such as Class Reference. If set to +# YES the compound reference will be hidden. +# The default value is: NO. + +HIDE_COMPOUND_REFERENCE= NO + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = YES + +# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each +# grouped member an include statement to the documentation, telling the reader +# which file to include in order to use the member. +# The default value is: NO. + +SHOW_GROUPED_MEMB_INC = NO + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. 
Note that +# this will also influence the order of the classes in the class list. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. +# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo +# list. This list is created by putting \todo commands in the documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test +# list. This list is created by putting \test commands in the documentation. +# The default value is: YES. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if ... \endif and \cond +# ... \endcond blocks. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. 
+# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES, the +# list will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. + +LAYOUT_FILE = layout.xml + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. See also \cite for info how to create references. + +CITE_BIB_FILES = ocean.bib references.bib zotero.bib + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate +# warnings for undocumented members. 
If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = YES + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some parameters +# in a documented function, or documenting parameters that don't exist or using +# markup commands wrongly. +# The default value is: YES. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO, doxygen will only warn about wrong or incomplete +# parameter documentation, but not about the absence of documentation. If +# EXTRACT_ALL is set to YES then this flag will automatically be disabled. +# The default value is: NO. + +WARN_NO_PARAMDOC = NO + +# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when +# a warning is encountered. +# The default value is: NO. + +WARN_AS_ERROR = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). + +WARN_LOGFILE = _build/doxygen_warn_nortd_latex_log.txt + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING +# Note: If this tag is empty the current directory is searched. + +INPUT = ../src \ + front_page.md \ + ../config_src/drivers/solo_driver \ + ../config_src/memory/dynamic_symmetric \ + ../config_src/external \ + ../config_src/drivers/FMS_cap + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. See the libiconv +# documentation (see: https://www.gnu.org/software/libiconv/) for the list of +# possible encodings. +# The default value is: UTF-8. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# read by doxygen. 
+# +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, +# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, +# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, +# *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment), +# *.doc (to be provided as doxygen C comment), *.txt (to be provided as doxygen +# C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, +# *.vhdl, *.ucf, *.qsf and *.ice. + +FILE_PATTERNS = *.c \ + *.cc \ + *.cxx \ + *.cpp \ + *.c++ \ + *.java \ + *.ii \ + *.ixx \ + *.ipp \ + *.i++ \ + *.inl \ + *.idl \ + *.ddl \ + *.odl \ + *.h \ + *.hh \ + *.hxx \ + *.hpp \ + *.h++ \ + *.cs \ + *.d \ + *.php \ + *.php4 \ + *.php5 \ + *.phtml \ + *.inc \ + *.m \ + *.markdown \ + *.md \ + *.mm \ + *.dox \ + *.doc \ + *.txt \ + *.py \ + *.pyw \ + *.f90 \ + *.f95 \ + *.f03 \ + *.f08 \ + *.f18 \ + *.f \ + *.for \ + *.vhd \ + *.vhdl \ + *.ucf \ + *.qsf \ + *.ice \ + *.F90 + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = ../src/equation_of_state/TEOS10 + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories for example use the pattern */test/* + +EXCLUDE_PATTERNS = makedep.py \ + Makefile \ + INSTALL + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories use the pattern */test/* + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or directories +# that contain example code fragments that are included (see the \include +# command). + +EXAMPLE_PATH = ../src + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and +# *.h) to filter out the source-files in the directories. If left blank all +# files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude commands +# irrespective of the value of the RECURSIVE tag. +# The default value is: NO. 
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH = images \
+ ../src
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on, for instance,
+# GitHub and want to reuse the introduction page also for the doxygen output.
+
+USE_MDFILE_AS_MAINPAGE = front_page.md
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER = YES
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
+ +INLINE_SOURCES = YES + +# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any +# special comment blocks from generated source code fragments. Normal C, C++ and +# Fortran comments will always remain visible. +# The default value is: YES. + +STRIP_CODE_COMMENTS = NO + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# entity all documented functions referencing it will be listed. +# The default value is: NO. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. +# The default value is: NO. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. The htags tool is part of GNU's global source tagging system +# (see https://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. +# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = YES + +# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in +# which the alphabetical index list will be split. +# Minimum value: 1, maximum value: 20, default value: 5. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. 
+ +COLS_IN_ALPHA_INDEX = 1 + +# In case all classes in a project start with a common prefix, all classes will +# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag +# can be used to specify a prefix (or a list of prefixes) that should be ignored +# while generating the index headers. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = _build/APIs + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# cascading style sheets that are included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefore more robust against future updates. +# Doxygen will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). For an example see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the style sheet and background images according to +# this color. Hue is specified as an angle on a colorwheel, see +# https://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use grayscales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting this +# to YES can help to show when doxygen was last run and thus if the +# documentation is up to date. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_TIMESTAMP = NO + +# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML +# documentation will contain a main index with vertical navigation menus that +# are dynamically created via JavaScript. If disabled, the navigation index will +# consists of multiple levels of tabs that are statically embedded in every HTML +# page. 
Disable this option to support browsers that do not have JavaScript, +# like the Qt help browser. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_MENUS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = NO + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_INDEX_NUM_ENTRIES = 900 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: https://developer.apple.com/xcode/), introduced with OSX +# 10.5 (Leopard). To create a documentation set, doxygen will generate a +# Makefile in the HTML output directory. Running make will produce the docset in +# that directory and running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy +# genXcode/_index.html for more information. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. 
The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# Windows. +# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = NO + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. You can add a path in front of the file if the result should not be +# written to the html output directory. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_FILE = + +# The HHC_LOCATION tag can be used to specify the location (absolute path +# including file name) of the HTML help compiler (hhc.exe). If non-empty, +# doxygen will try to run the HTML help compiler on the generated index.hhp. +# The file has to be specified with full path. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +HHC_LOCATION = + +# The GENERATE_CHI flag controls if a separate .chi index file is generated +# (YES) or that it should be included in the main .chm file (NO). +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +GENERATE_CHI = NO + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated +# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it +# enables the Previous and Next buttons. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. 
+ +QHP_NAMESPACE = org.doxygen.Project + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. For more information please see Qt Help Project / Virtual +# Folders (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual- +# folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. For more information please see Qt Help Project / Custom +# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. For more information please see Qt Help Project / Custom +# Filters (see: https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. Qt Help Project / Filter Attributes (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location of Qt's +# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the +# generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. + +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. 
If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can +# further fine-tune the look of the index. As an example, the default style +# sheet generated by doxygen has an example that shows how to put an image at +# the root of the tree instead of the PROJECT_NAME. Since the tree basically has +# the same information as the tab index, you could consider setting +# DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_TREEVIEW = YES + +# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that +# doxygen will group on one line in the generated HTML documentation. +# +# Note that a value of 0 will completely suppress the enum values from appearing +# in the overview section. +# Minimum value: 0, maximum value: 20, default value: 4. +# This tag requires that the tag GENERATE_HTML is set to YES. + +ENUM_VALUES_PER_LINE = 4 + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used +# to set the initial width (in pixels) of the frame in which the tree is shown. +# Minimum value: 0, maximum value: 1500, default value: 250. +# This tag requires that the tag GENERATE_HTML is set to YES. + +TREEVIEW_WIDTH = 250 + +# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to +# external symbols imported via tag files in a separate window. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +EXT_LINKS_IN_WINDOW = NO + +# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg +# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see +# https://inkscape.org) to generate formulas as SVG images instead of PNGs for +# the HTML output. These images will generally look nicer at scaled resolutions. +# Possible values are: png (the default) and svg (looks nicer but requires the +# pdf2svg or inkscape tool). +# The default value is: png. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FORMULA_FORMAT = svg + +# Use this tag to change the font size of LaTeX formulas included as images in +# the HTML documentation. When you change the font size after a successful +# doxygen run you need to manually remove any form_*.png images from the HTML +# output directory to force them to be regenerated. +# Minimum value: 8, maximum value: 50, default value: 10. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_FONTSIZE = 10 + +# Use the FORMULA_TRANSPARENT tag to determine whether or not the images +# generated for formulas are transparent PNGs. Transparent PNGs are not +# supported properly for IE 6.0, but are supported on all modern browsers. +# +# Note that when changing this option you need to delete any form_*.png files in +# the HTML output directory before the changes have effect. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_TRANSPARENT = YES + +# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands +# to create new LaTeX commands to be used in formulas as building blocks. 
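+# For example, such a macro file could contain a line like
+#   \newcommand{\dd}{\mathrm{d}}
+# for reuse inside formulas (illustrative only; this configuration leaves
+# FORMULA_MACROFILE empty).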
See
+# the section "Including formulas" for details.
+
+FORMULA_MACROFILE =
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# https://www.mathjax.org) which uses client side JavaScript for the rendering
+# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want the formulas to look prettier in the HTML output.
+# When enabled you may also need to install MathJax separately and configure
+# the path to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX = YES
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. See the MathJax site (see:
+# http://docs.mathjax.org/en/latest/output.html) for more details.
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility), NativeMML (i.e. MathML) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from https://www.mathjax.org before deployment.
+# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_RELPATH = https://cdn.jsdelivr.net/npm/mathjax@2
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
+# extension names that should be enabled during MathJax rendering. For example
+# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_EXTENSIONS =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow, then
+# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/