diff --git a/.cicd/scripts/srw_ftest.sh b/.cicd/scripts/srw_ftest.sh
index 5479e8b46d..d98d20c831 100755
--- a/.cicd/scripts/srw_ftest.sh
+++ b/.cicd/scripts/srw_ftest.sh
@@ -46,7 +46,6 @@ fi
# Test directories
we2e_experiment_base_dir="${workspace}/expt_dirs"
we2e_test_dir="${workspace}/tests/WE2E"
-nco_dir="${workspace}/nco_dirs"
pwd
diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh
index 1bffe083bd..76ddf020df 100755
--- a/.cicd/scripts/srw_test.sh
+++ b/.cicd/scripts/srw_test.sh
@@ -28,7 +28,6 @@ fi
# Test directories
we2e_experiment_base_dir="${workspace}/expt_dirs"
we2e_test_dir="${workspace}/tests/WE2E"
-nco_dir="${workspace}/nco_dirs"
# Run the end-to-end tests.
if "${SRW_WE2E_COMPREHENSIVE_TESTS}"; then
@@ -41,8 +40,7 @@ cd ${we2e_test_dir}
# Progress file
progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt"
./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \
- --expt_basedir=${we2e_experiment_base_dir} \
- --opsroot=${nco_dir} | tee ${progress_file}
+ --expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file}
# Set exit code to number of failures
set +e
diff --git a/.gitignore b/.gitignore
index ad778d0bc1..2b362272f6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,9 @@ build/
fix/
include/
lib/
+parm/aqm_utils_parm/
+parm/nexus_config/
+parm/ufs_utils_parm/
share/
sorc/*/
tests/WE2E/WE2E_tests_*.yaml
@@ -12,6 +15,8 @@ tests/WE2E/*.txt
tests/WE2E/*.log
tests/WE2E/log.*
ush/__pycache__/
+ush/aqm_utils_python/
+ush/nexus_utils/
ush/config.yaml
ush/python_utils/__pycache__/
ush/python_utils/workflow-tools/
diff --git a/Externals.cfg b/Externals.cfg
index 4bae74b316..4b54c71d72 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -30,7 +30,7 @@ protocol = git
repo_url = https://github.com/noaa-oar-arl/NEXUS
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = 6a7a994
+hash = 40346b6
local_path = sorc/arl_nexus
required = True
@@ -39,7 +39,7 @@ protocol = git
repo_url = https://github.com/NOAA-EMC/AQM-utils
# Specify either a branch name or a hash but not both.
#branch = develop
-hash = 694a139
+hash = d953bd1
local_path = sorc/AQM-utils
required = True
diff --git a/aqm_environment.yml b/aqm_environment.yml
index 03d72f6706..afd8a7b634 100644
--- a/aqm_environment.yml
+++ b/aqm_environment.yml
@@ -9,5 +9,5 @@ dependencies:
- pylint=2.17*
- pytest=7.2*
- scipy=1.10.*
- - uwtools=1.0.0
+ - uwtools=2.1*
- xarray=2022.11.*
diff --git a/devbuild.sh b/devbuild.sh
index 05cc76312c..014fbdb3b7 100755
--- a/devbuild.sh
+++ b/devbuild.sh
@@ -502,4 +502,38 @@ else
fi
fi
+# Copy config/python directories from component to main directory (EE2 compliance)
+if [ "${BUILD_UFS_UTILS}" = "on" ]; then
+ if [ -d "${SRW_DIR}/parm/ufs_utils_parm" ]; then
+ rm -rf ${SRW_DIR}/parm/ufs_utils_parm
+ fi
+ cp -rp ${SRW_DIR}/sorc/UFS_UTILS/parm ${SRW_DIR}/parm/ufs_utils_parm
+fi
+if [ "${BUILD_UPP}" = "on" ]; then
+ if [ -d "${SRW_DIR}/parm/upp_parm" ]; then
+ rm -rf ${SRW_DIR}/parm/upp_parm
+ fi
+ cp -rp ${SRW_DIR}/sorc/UPP/parm ${SRW_DIR}/parm/upp_parm
+fi
+if [ "${BUILD_NEXUS}" = "on" ]; then
+ if [ -d "${SRW_DIR}/parm/nexus_config" ]; then
+ rm -rf ${SRW_DIR}/parm/nexus_config
+ fi
+ cp -rp ${SRW_DIR}/sorc/arl_nexus/config ${SRW_DIR}/parm/nexus_config
+ if [ -d "${SRW_DIR}/ush/nexus_utils" ]; then
+ rm -rf ${SRW_DIR}/ush/nexus_utils
+ fi
+ cp -rp ${SRW_DIR}/sorc/arl_nexus/utils ${SRW_DIR}/ush/nexus_utils
+fi
+if [ "${BUILD_AQM_UTILS}" = "on" ]; then
+ if [ -d "${SRW_DIR}/parm/aqm_utils_parm" ]; then
+ rm -rf ${SRW_DIR}/parm/aqm_utils_parm
+ fi
+ cp -rp ${SRW_DIR}/sorc/AQM-utils/parm ${SRW_DIR}/parm/aqm_utils_parm
+ if [ -d "${SRW_DIR}/ush/aqm_utils_python" ]; then
+ rm -rf ${SRW_DIR}/ush/aqm_utils_python
+ fi
+ cp -rp ${SRW_DIR}/sorc/AQM-utils/python_utils ${SRW_DIR}/ush/aqm_utils_python
+fi
+
exit 0
diff --git a/jobs/JREGIONAL_BIAS_CORRECTION_O3 b/jobs/JREGIONAL_BIAS_CORRECTION_O3
deleted file mode 100755
index ddcef59494..0000000000
--- a/jobs/JREGIONAL_BIAS_CORRECTION_O3
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# This script runs BIAS-CORRECTION-O3.
-#
-#-----------------------------------------------------------------------
-#
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that runs BIAS-CORRECTION-O3.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory.
-#
-#-----------------------------------------------------------------------
-#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_O3}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-
-mkdir_vrfy -p ${COMOUTwmo}
-
-export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}"
-
-TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back
-export BC_STDAY=${BC_STDAY:-${TMP_STDAY}}
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job and pass to it the necessary varia-
-# bles.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_bias_correction_o3.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/jobs/JREGIONAL_BIAS_CORRECTION_PM25 b/jobs/JREGIONAL_BIAS_CORRECTION_PM25
deleted file mode 100755
index 7e08b02a12..0000000000
--- a/jobs/JREGIONAL_BIAS_CORRECTION_PM25
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# This script runs BIAS-CORRECTION-PM25.
-#
-#-----------------------------------------------------------------------
-#
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that runs BIAS-CORRECTION-PM25.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory.
-#
-#-----------------------------------------------------------------------
-#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_PM25}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-
-mkdir_vrfy -p ${COMOUTwmo}
-
-export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}"
-
-TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back
-export BC_STDAY=${BC_STDAY:-${TMP_STDAY}}
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job and pass to it the necessary varia-
-# bles.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_bias_correction_pm25.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS
index 1e38f4058d..70306c0a87 100755
--- a/jobs/JREGIONAL_MAKE_ICS
+++ b/jobs/JREGIONAL_MAKE_ICS
@@ -56,9 +56,9 @@ for the FV3 (in NetCDF format).
#-----------------------------------------------------------------------
#
if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}"
+ export INPUT_DATA="${COMIN}"
else
- export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
+ export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
fi
mkdir_vrfy -p "${INPUT_DATA}"
#
diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS
index 4c524e26a6..16ac382fee 100755
--- a/jobs/JREGIONAL_MAKE_LBCS
+++ b/jobs/JREGIONAL_MAKE_LBCS
@@ -56,9 +56,9 @@ hour zero).
#-----------------------------------------------------------------------
#
if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}"
+ export INPUT_DATA="${COMIN}"
else
- export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
+ export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
fi
mkdir_vrfy -p "${INPUT_DATA}"
#
diff --git a/jobs/JREGIONAL_NEXUS_POST_SPLIT b/jobs/JREGIONAL_NEXUS_POST_SPLIT
deleted file mode 100755
index 7cb8a55bf0..0000000000
--- a/jobs/JREGIONAL_NEXUS_POST_SPLIT
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that generates the emission files
-using NEXUS which will output for FV3 (in NetCDF format).
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the name of and create the directory in which the output from this
-# script will be placed (if it doesn't already exist).
-#
-#-----------------------------------------------------------------------
-#
-if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}"
-else
- export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
-fi
-mkdir_vrfy -p "${INPUT_DATA}"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory
-#
-#-----------------------------------------------------------------------
-#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_POST_SPLIT}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_nexus_post_split.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/jobs/JREGIONAL_POINT_SOURCE b/jobs/JREGIONAL_POINT_SOURCE
deleted file mode 100755
index 57000dd599..0000000000
--- a/jobs/JREGIONAL_POINT_SOURCE
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that generates the point source files.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the name of and create the directory in which the output from this
-# script will be placed (if it doesn't already exist).
-#
-#-----------------------------------------------------------------------
-#
-if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}"
-else
- export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
-fi
-mkdir_vrfy -p "${INPUT_DATA}"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory
-#
-#-----------------------------------------------------------------------
-#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POINT_SOURCE}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-#
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_point_source.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/jobs/JREGIONAL_POST_STAT_O3 b/jobs/JREGIONAL_POST_STAT_O3
deleted file mode 100755
index a522d00dbb..0000000000
--- a/jobs/JREGIONAL_POST_STAT_O3
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# This script runs POST-STAT-O3.
-#
-#-----------------------------------------------------------------------
-#
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that runs POST-STAT-O3.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory.
-#
-#-----------------------------------------------------------------------
-#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_O3}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-
-mkdir_vrfy -p ${COMOUTwmo}
-
-export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}"
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job and pass to it the necessary varia-
-# bles.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_post_stat_o3.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/jobs/JREGIONAL_POST_STAT_PM25 b/jobs/JREGIONAL_POST_STAT_PM25
deleted file mode 100755
index cd86879a73..0000000000
--- a/jobs/JREGIONAL_POST_STAT_PM25
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# This script runs POST-STAT-PM25.
-#
-#-----------------------------------------------------------------------
-#
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that runs POST-UPP-STAT.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory.
-#
-#-----------------------------------------------------------------------
-#
-DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_PM25}"
-if [ "${RUN_ENVIR}" = "community" ]; then
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-
-mkdir_vrfy -p ${COMOUTwmo}
-
-export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}"
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job and pass to it the necessary varia-
-# bles.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_post_stat_pm25.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/jobs/JREGIONAL_PRE_POST_STAT b/jobs/JREGIONAL_PRE_POST_STAT
deleted file mode 100755
index 640c629bce..0000000000
--- a/jobs/JREGIONAL_PRE_POST_STAT
+++ /dev/null
@@ -1,108 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# This script runs PRE-POST-STAT.
-#
-#-----------------------------------------------------------------------
-#
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that runs POST-UPP-STAT.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory.
-#
-#-----------------------------------------------------------------------
-#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_PRE_POST_STAT}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job and pass to it the necessary varia-
-# bles.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_pre_post_stat.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Remove post_complete flag file.
-#
-#-----------------------------------------------------------------------
-#
-post_complete_file="${COMIN}/post_${PDY}${cyc}_task_complete.txt"
-if [ -f ${post_complete_file} ] ; then
- rm_vrfy -f ${post_complete_file}
-fi
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST
index b4327667a0..97b100967c 100755
--- a/jobs/JREGIONAL_RUN_POST
+++ b/jobs/JREGIONAL_RUN_POST
@@ -139,7 +139,7 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then
if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" )
if [ "${fhr}" = "${fcst_len_hrs}" ]; then
- touch "${COMIN}/post_${PDY}${cyc}_task_complete.txt"
+ touch "${DATAROOT}/DATA_SHARE/${PDY}${cyc}/post_${PDY}${cyc}_task_complete.txt"
fi
fi
fi
diff --git a/jobs/JREGIONAL_AQM_ICS b/jobs/JSRW_AQM_ICS
similarity index 50%
rename from jobs/JREGIONAL_AQM_ICS
rename to jobs/JSRW_AQM_ICS
index 5c8ba9c8dd..0c4df8aa5b 100755
--- a/jobs/JREGIONAL_AQM_ICS
+++ b/jobs/JSRW_AQM_ICS
@@ -8,26 +8,30 @@
#
#-----------------------------------------------------------------------
#
-
+date
+export PS4='+ $SECONDS + '
+set -xue
#
#-----------------------------------------------------------------------
#
-# Source the variable definitions file and the bash utility functions.
+# Set the NCO standard environment variables (Table 1, p. 4)
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
#
#-----------------------------------------------------------------------
#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
+# Source the variable definitions file and the bash utility functions.
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
#
@@ -59,54 +63,117 @@ which the model needs.
#
#-----------------------------------------------------------------------
#
-# Set the name of and create the directory in which the output from this
-# script will be placed (if it doesn't already exist).
+# Define job and jobid by default for rocoto
#
#-----------------------------------------------------------------------
#
-if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}"
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
else
- export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
fi
-mkdir_vrfy -p "${INPUT_DATA}"
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
#
#-----------------------------------------------------------------------
#
-# Set the run directory
+# Set sub-cycle and ensemble member names in file/directory names
#
#-----------------------------------------------------------------------
#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_AQM_ICS}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
fi
#
#-----------------------------------------------------------------------
#
-# Call the ex-script for this J-job and pass to it the necessary variables.
+# Set the name of and create the directory in which the output from this
+# script will be placed (if it doesn't already exist).
#
#-----------------------------------------------------------------------
#
-$SCRIPTSdir/exregional_aqm_ics.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+if [ $RUN_ENVIR = "nco" ]; then
+ export INPUT_DATA="${COMIN}"
+else
+ export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
+fi
+mkdir -p "${INPUT_DATA}"
#
#-----------------------------------------------------------------------
#
-# Run job postamble.
+# Call the ex-script for this J-job.
#
#-----------------------------------------------------------------------
#
-job_postamble
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_aqm_ics.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
#
#-----------------------------------------------------------------------
#
-# Restore the shell options saved at the beginning of this script/function.
+# Whether or not working directory DATA should be kept.
#
#-----------------------------------------------------------------------
#
-{ restore_shell_opts; } > /dev/null 2>&1
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
diff --git a/jobs/JREGIONAL_AQM_LBCS b/jobs/JSRW_AQM_LBCS
similarity index 50%
rename from jobs/JREGIONAL_AQM_LBCS
rename to jobs/JSRW_AQM_LBCS
index c711f90288..11a1420d5e 100755
--- a/jobs/JREGIONAL_AQM_LBCS
+++ b/jobs/JSRW_AQM_LBCS
@@ -8,26 +8,30 @@
#
#-----------------------------------------------------------------------
#
-
+date
+export PS4='+ $SECONDS + '
+set -xue
#
#-----------------------------------------------------------------------
#
-# Source the variable definitions file and the bash utility functions.
+# Set the NCO standard environment variables (Table 1, p. 4)
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
#
#-----------------------------------------------------------------------
#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
+# Source the variable definitions file and the bash utility functions.
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
#
@@ -59,55 +63,118 @@ which the model needs.
#
#-----------------------------------------------------------------------
#
-# Set the name of and create the directory in which the output from this
-# script will be placed (if it doesn't already exist).
+# Define job and jobid by default for rocoto
#
#-----------------------------------------------------------------------
#
-if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}"
-else
- export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
fi
-mkdir_vrfy -p "${INPUT_DATA}"
#
#-----------------------------------------------------------------------
#
-# Set the run directory
+# Create a temp working directory (DATA) and cd into it.
#
#-----------------------------------------------------------------------
#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_AQM_LBCS}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
fi
+export COMINgefs="${COMINgefs:-${COMINgefs_default}}"
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
#
#-----------------------------------------------------------------------
#
-# Call the ex-script for this J-job and pass to it the necessary variables.
+# Set sub-cycle and ensemble member names in file/directory names
#
#-----------------------------------------------------------------------
#
-$SCRIPTSdir/exregional_aqm_lbcs.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Set the name of and create the directory in which the output from this
+# script will be placed (if it doesn't already exist).
+#
+#-----------------------------------------------------------------------
+#
+if [ $RUN_ENVIR = "nco" ]; then
+ export INPUT_DATA="${COMIN}"
+else
+ export INPUT_DATA="${EXPTDIR}/${PDY}${cyc}${SLASH_ENSMEM_SUBDIR}/INPUT"
+fi
+mkdir -p "${INPUT_DATA}"
#
#-----------------------------------------------------------------------
#
-# Run job postamble.
+# Call the ex-script for this J-job.
#
#-----------------------------------------------------------------------
#
-job_postamble
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_aqm_lbcs.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
#
#-----------------------------------------------------------------------
#
-# Restore the shell options saved at the beginning of this script/function.
+# Whether or not working directory DATA should be kept.
#
#-----------------------------------------------------------------------
#
-{ restore_shell_opts; } > /dev/null 2>&1
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
diff --git a/jobs/JSRW_BIAS_CORRECTION_O3 b/jobs/JSRW_BIAS_CORRECTION_O3
new file mode 100755
index 0000000000..3ab2f2d40f
--- /dev/null
+++ b/jobs/JSRW_BIAS_CORRECTION_O3
@@ -0,0 +1,161 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs BIAS-CORRECTION-O3.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, p. 4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP}
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs BIAS-CORRECTION-O3.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+
+TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back
+export BC_STDAY=${BC_STDAY:-${TMP_STDAY}}
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_bias_correction_o3.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_BIAS_CORRECTION_PM25 b/jobs/JSRW_BIAS_CORRECTION_PM25
new file mode 100755
index 0000000000..42210e7f29
--- /dev/null
+++ b/jobs/JSRW_BIAS_CORRECTION_PM25
@@ -0,0 +1,161 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs BIAS-CORRECTION-PM25.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, p. 4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP}
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs BIAS-CORRECTION-PM25.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+
+TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back
+export BC_STDAY=${BC_STDAY:-${TMP_STDAY}}
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_bias_correction_pm25.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JREGIONAL_FIRE_EMISSION b/jobs/JSRW_FIRE_EMISSION
similarity index 57%
rename from jobs/JREGIONAL_FIRE_EMISSION
rename to jobs/JSRW_FIRE_EMISSION
index fdb6e57b0a..ae0343e60e 100755
--- a/jobs/JREGIONAL_FIRE_EMISSION
+++ b/jobs/JSRW_FIRE_EMISSION
@@ -7,26 +7,30 @@
#
#-----------------------------------------------------------------------
#
-
+date
+export PS4='+ $SECONDS + '
+set -xue
#
#-----------------------------------------------------------------------
#
-# Source the variable definitions file and the bash utility functions.
+# Set the NCO standard environment variables (Table 1, p. 4)
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
#
#-----------------------------------------------------------------------
#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
+# Source the variable definitions file and the bash utility functions.
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
#
@@ -57,78 +61,127 @@ emission data files from disk, or HPSS.
#
#-----------------------------------------------------------------------
#
-# Set the external model start time
+# Define job and jobid by default for rocoto
#
#-----------------------------------------------------------------------
#
-export TIME_OFFSET_HRS=${AQM_FIRE_FILE_OFFSET_HRS:-0}
-yyyymmdd=${PDY}
-hh=${cyc}
-export FIRE_FILE_CDATE=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${TIME_OFFSET_HRS} hours" "+%Y%m%d%H" )
-
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
#
#-----------------------------------------------------------------------
#
-# Check whether FIRE EMISSION data files are available on the specified
-# cycle date and time on HPSS (FIRE_FILE_CDATE).
+# Create a temp working directory (DATA) and cd into it.
#
#-----------------------------------------------------------------------
#
-CDATE_min="2022101500"
-if [ "$FIRE_FILE_CDATE" -lt "$CDATE_min" ]; then
- print_info_msg "
-========================================================================
-RAVE fire emission data are not available on HPSS for this date.
-CDATE: \"${FIRE_FILE_CDATE}\"
-CDATE_min: \"${CDATE_min}\"
-========================================================================"
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
fi
+
+mkdir -p ${COMOUT}
+
+export COMINfire="${COMINfire:-${COMINfire_default}}"
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
#
#-----------------------------------------------------------------------
#
-# Set the run directory
+# Set sub-cycle and ensemble member names in file/directory names
#
#-----------------------------------------------------------------------
#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_FIRE_EMISSION}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
fi
#
#-----------------------------------------------------------------------
#
-# Create the directory where the RAVE fire emission files should be stored
+# Set the external model start time
#
#-----------------------------------------------------------------------
#
-export FIRE_EMISSION_STAGING_DIR="${FIRE_EMISSION_STAGING_DIR:-${COMIN}/FIRE_EMISSION}"
-mkdir_vrfy -p "${FIRE_EMISSION_STAGING_DIR}"
+export TIME_OFFSET_HRS=${AQM_FIRE_FILE_OFFSET_HRS:-0}
+export FIRE_FILE_CDATE=`$NDATE -${TIME_OFFSET_HRS} ${PDY}${cyc}`
#
#-----------------------------------------------------------------------
#
-# Call the ex-script for this J-job and pass to it the necessary variables.
+# Check whether FIRE EMISSION data files are available on the specified
+# cycle date and time on HPSS (FIRE_FILE_CDATE).
#
#-----------------------------------------------------------------------
#
-$SCRIPTSdir/exregional_fire_emission.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+CDATE_min="2022101500"
+if [ "$FIRE_FILE_CDATE" -lt "$CDATE_min" ]; then
+ print_info_msg "
+========================================================================
+RAVE fire emission data are not available on HPSS for this date.
+CDATE: \"${FIRE_FILE_CDATE}\"
+CDATE_min: \"${CDATE_min}\"
+========================================================================"
+fi
#
#-----------------------------------------------------------------------
#
-# Run job postamble.
+# Call the ex-script for this J-job.
#
#-----------------------------------------------------------------------
#
-job_postamble
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_fire_emission.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
#
#-----------------------------------------------------------------------
#
-# Restore the shell options saved at the beginning of this script/function.
+# Whether or not working directory DATA should be kept.
#
#-----------------------------------------------------------------------
#
-{ restore_shell_opts; } > /dev/null 2>&1
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
diff --git a/jobs/JREGIONAL_NEXUS_EMISSION b/jobs/JSRW_NEXUS_EMISSION
similarity index 50%
rename from jobs/JREGIONAL_NEXUS_EMISSION
rename to jobs/JSRW_NEXUS_EMISSION
index 915de0f054..33f1aca757 100755
--- a/jobs/JREGIONAL_NEXUS_EMISSION
+++ b/jobs/JSRW_NEXUS_EMISSION
@@ -3,30 +3,34 @@
#
#-----------------------------------------------------------------------
#
-# This script generate NEXUS emission netcdf file.
+# This script generates an individual NEXUS emission netCDF file.
#
#-----------------------------------------------------------------------
#
-
+date
+export PS4='+ $SECONDS + '
+set -xue
#
#-----------------------------------------------------------------------
#
-# Source the variable definitions file and the bash utility functions.
+# Set the NCO standard environment variables (Table 1, p. 4)
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
#
#-----------------------------------------------------------------------
#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
+# Source the variable definitions file and the bash utility functions.
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
#
@@ -57,55 +61,103 @@ using NEXUS which will output for FV3 (in NetCDF format).
#
#-----------------------------------------------------------------------
#
-# Set the name of and create the directory in which the output from this
-# script will be placed (if it doesn't already exist).
+# Define job and jobid by default for rocoto
#
#-----------------------------------------------------------------------
#
-if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}/NEXUS"
-else
- export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/NEXUS"
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
fi
-mkdir_vrfy -p "${INPUT_DATA}"
#
#-----------------------------------------------------------------------
#
-# Set the run directory
+# Create a temp working directory (DATA) and cd into it.
#
#-----------------------------------------------------------------------
#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_EMISSION_${nspt}}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
#
#-----------------------------------------------------------------------
#
-# Call the ex-script for this J-job.
+# Set sub-cycle and ensemble member names in file/directory names
#
#-----------------------------------------------------------------------
#
-$SCRIPTSdir/exregional_nexus_emission.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
#
#-----------------------------------------------------------------------
#
-# Run job postamble.
+# Call the ex-script for this J-job.
#
#-----------------------------------------------------------------------
#
-job_postamble
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_nexus_emission.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
#
#-----------------------------------------------------------------------
#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
+# Whether or not working directory DATA should be kept.
#
#-----------------------------------------------------------------------
#
-{ restore_shell_opts; } > /dev/null 2>&1
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
diff --git a/jobs/JREGIONAL_NEXUS_GFS_SFC b/jobs/JSRW_NEXUS_GFS_SFC
similarity index 51%
rename from jobs/JREGIONAL_NEXUS_GFS_SFC
rename to jobs/JSRW_NEXUS_GFS_SFC
index 5fc05f86db..89d84c740d 100755
--- a/jobs/JREGIONAL_NEXUS_GFS_SFC
+++ b/jobs/JSRW_NEXUS_GFS_SFC
@@ -7,26 +7,30 @@
#
#-----------------------------------------------------------------------
#
-
+date
+export PS4='+ $SECONDS + '
+set -xue
#
#-----------------------------------------------------------------------
#
-# Source the variable definitions file and the bash utility functions.
+# Set the NCO standard environment variables (Table 1, p. 4)
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh "TRUE"
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
#
#-----------------------------------------------------------------------
#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
+# Source the variable definitions file and the bash utility functions.
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
#
@@ -57,15 +61,87 @@ data files from disk, or HPSS.
#
#-----------------------------------------------------------------------
#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
# Set the external model start time
#
#-----------------------------------------------------------------------
#
export TIME_OFFSET_HRS=${NEXUS_GFS_SFC_OFFSET_HRS:-0}
-yyyymmdd=${PDY}
-hh=${cyc}
-export GFS_SFC_CDATE=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${TIME_OFFSET_HRS} hours" "+%Y%m%d%H" )
-
+export GFS_SFC_CDATE=`$NDATE -${TIME_OFFSET_HRS} ${PDY}${cyc}`
#
#-----------------------------------------------------------------------
#
@@ -91,40 +182,28 @@ fi
#
#-----------------------------------------------------------------------
#
-# Set the run directory
+# Call the ex-script for this J-job.
#
#-----------------------------------------------------------------------
#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_GFS_SFC}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+$SCRIPTSsrw/exsrw_nexus_gfs_sfc.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
fi
#
#-----------------------------------------------------------------------
#
-# Call the ex-script for this J-job and pass to it the necessary variables.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_nexus_gfs_sfc.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
+# Whether or not working directory DATA should be kept.
#
#-----------------------------------------------------------------------
#
-job_postamble "FALSE"
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
diff --git a/jobs/JSRW_NEXUS_POST_SPLIT b/jobs/JSRW_NEXUS_POST_SPLIT
new file mode 100755
index 0000000000..6e5a0a259a
--- /dev/null
+++ b/jobs/JSRW_NEXUS_POST_SPLIT
@@ -0,0 +1,163 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script generates the final NEXUS emission netCDF file.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, p. 4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP}
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that generates the emission files
+using NEXUS, which are output for FV3 (in NetCDF format).
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_nexus_post_split.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_POINT_SOURCE b/jobs/JSRW_POINT_SOURCE
new file mode 100755
index 0000000000..a112a2d275
--- /dev/null
+++ b/jobs/JSRW_POINT_SOURCE
@@ -0,0 +1,159 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script generates the POINT SOURCE EMISSION file.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, p. 4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP}
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that generates the point source files.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+#
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+$SCRIPTSsrw/exsrw_point_source.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_POST_STAT_O3 b/jobs/JSRW_POST_STAT_O3
new file mode 100755
index 0000000000..8924cba9e5
--- /dev/null
+++ b/jobs/JSRW_POST_STAT_O3
@@ -0,0 +1,160 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs POST-STAT-O3.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, p. 4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP}
+. $USHdir/job_preamble.sh
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs POST-STAT-O3.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_post_stat_o3.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
+
+
diff --git a/jobs/JSRW_POST_STAT_PM25 b/jobs/JSRW_POST_STAT_PM25
new file mode 100755
index 0000000000..83434fa8c7
--- /dev/null
+++ b/jobs/JSRW_POST_STAT_PM25
@@ -0,0 +1,158 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs POST-STAT-PM25.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, p. 4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP}
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs POST-STAT-PM25.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_post_stat_pm25.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_PRE_POST_STAT b/jobs/JSRW_PRE_POST_STAT
new file mode 100755
index 0000000000..12561085c2
--- /dev/null
+++ b/jobs/JSRW_PRE_POST_STAT
@@ -0,0 +1,173 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs PRE-POST-STAT.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, p. 4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP}
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs PRE-POST-STAT.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+ if [ "${SCHED}" = "slurm" ]; then
+ job=${SLURM_JOB_NAME}
+ pid=${SLURM_JOB_ID}
+ elif [ "${SCHED}" = "pbspro" ]; then
+ job=${PBS_JOBNAME}
+ pid=${PBS_JOBID}
+ else
+ job="task"
+ pid=$$
+ fi
+ jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+ export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+ export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+ export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+ export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_pre_post_stat.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+ cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Remove post_complete flag file.
+#
+#-----------------------------------------------------------------------
+#
+post_complete_file="${DATA_SHARE}/post_${PDY}${cyc}_task_complete.txt"
+if [ -f ${post_complete_file} ] ; then
+ rm -f ${post_complete_file}
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+ rm -rf ${DATA}
+fi
+date
+
diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua
index 2121d303dc..061feef67b 100644
--- a/modulefiles/build_hera_intel.lua
+++ b/modulefiles/build_hera_intel.lua
@@ -27,6 +27,7 @@ load("srw_common")
load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1"))
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
+load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2"))
setenv("CMAKE_C_COMPILER","mpiicc")
setenv("CMAKE_CXX_COMPILER","mpiicpc")
diff --git a/modulefiles/python_srw_cmaq.lua b/modulefiles/python_srw_aqm.lua
similarity index 100%
rename from modulefiles/python_srw_cmaq.lua
rename to modulefiles/python_srw_aqm.lua
diff --git a/modulefiles/tasks/cheyenne/aqm_ics.local.lua b/modulefiles/tasks/cheyenne/aqm_ics.local.lua
index 1040aab9a6..9c9f0ca3d5 100644
--- a/modulefiles/tasks/cheyenne/aqm_ics.local.lua
+++ b/modulefiles/tasks/cheyenne/aqm_ics.local.lua
@@ -1,3 +1,3 @@
load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
load("nco/4.9.5")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua b/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua
index 1040aab9a6..9c9f0ca3d5 100644
--- a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua
+++ b/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua
@@ -1,3 +1,3 @@
load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
load("nco/4.9.5")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/fire_emission.local.lua b/modulefiles/tasks/cheyenne/fire_emission.local.lua
index b62670156f..86252a9a4f 100644
--- a/modulefiles/tasks/cheyenne/fire_emission.local.lua
+++ b/modulefiles/tasks/cheyenne/fire_emission.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/nexus_emission.local.lua b/modulefiles/tasks/cheyenne/nexus_emission.local.lua
index c46ead59a9..3c690fa12a 100644
--- a/modulefiles/tasks/cheyenne/nexus_emission.local.lua
+++ b/modulefiles/tasks/cheyenne/nexus_emission.local.lua
@@ -2,4 +2,4 @@ load("nco/4.9.5")
load("mpt/2.25")
load("ncarenv")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua b/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua
index b62670156f..86252a9a4f 100644
--- a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua
+++ b/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua b/modulefiles/tasks/cheyenne/nexus_post_split.local.lua
index c957eff552..e3f4bbe95d 100644
--- a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua
+++ b/modulefiles/tasks/cheyenne/nexus_post_split.local.lua
@@ -1,3 +1,3 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.5"))
load("ncarenv")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/point_source.local.lua b/modulefiles/tasks/cheyenne/point_source.local.lua
index b62670156f..86252a9a4f 100644
--- a/modulefiles/tasks/cheyenne/point_source.local.lua
+++ b/modulefiles/tasks/cheyenne/point_source.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua b/modulefiles/tasks/cheyenne/pre_post_stat.local.lua
index 7dcdc5969b..042eb2f732 100644
--- a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua
+++ b/modulefiles/tasks/cheyenne/pre_post_stat.local.lua
@@ -1,2 +1,2 @@
load("nco/4.9.5")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/aqm_ics.local.lua b/modulefiles/tasks/derecho/aqm_ics.local.lua
index 26b28db2c5..30f1157fbb 100644
--- a/modulefiles/tasks/derecho/aqm_ics.local.lua
+++ b/modulefiles/tasks/derecho/aqm_ics.local.lua
@@ -1,2 +1,2 @@
load("nco/5.0.6")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/aqm_lbcs.local.lua b/modulefiles/tasks/derecho/aqm_lbcs.local.lua
index 26b28db2c5..30f1157fbb 100644
--- a/modulefiles/tasks/derecho/aqm_lbcs.local.lua
+++ b/modulefiles/tasks/derecho/aqm_lbcs.local.lua
@@ -1,2 +1,2 @@
load("nco/5.0.6")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/fire_emission.local.lua b/modulefiles/tasks/derecho/fire_emission.local.lua
index b62670156f..86252a9a4f 100644
--- a/modulefiles/tasks/derecho/fire_emission.local.lua
+++ b/modulefiles/tasks/derecho/fire_emission.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/nexus_emission.local.lua b/modulefiles/tasks/derecho/nexus_emission.local.lua
index 09f38a17dd..e7f216375c 100644
--- a/modulefiles/tasks/derecho/nexus_emission.local.lua
+++ b/modulefiles/tasks/derecho/nexus_emission.local.lua
@@ -1,4 +1,4 @@
load("nco/5.0.6")
load("ncarenv")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua
index b62670156f..86252a9a4f 100644
--- a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua
+++ b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/nexus_post_split.local.lua b/modulefiles/tasks/derecho/nexus_post_split.local.lua
index a03758c9c6..07d126ff0b 100644
--- a/modulefiles/tasks/derecho/nexus_post_split.local.lua
+++ b/modulefiles/tasks/derecho/nexus_post_split.local.lua
@@ -1,3 +1,3 @@
load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6"))
load("ncarenv")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/point_source.local.lua b/modulefiles/tasks/derecho/point_source.local.lua
index b62670156f..86252a9a4f 100644
--- a/modulefiles/tasks/derecho/point_source.local.lua
+++ b/modulefiles/tasks/derecho/point_source.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/derecho/pre_post_stat.local.lua b/modulefiles/tasks/derecho/pre_post_stat.local.lua
index 26b28db2c5..30f1157fbb 100644
--- a/modulefiles/tasks/derecho/pre_post_stat.local.lua
+++ b/modulefiles/tasks/derecho/pre_post_stat.local.lua
@@ -1,2 +1,2 @@
load("nco/5.0.6")
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/hera/aqm_ics.local.lua b/modulefiles/tasks/hera/aqm_ics.local.lua
index 0e7132d749..2eb2ea2ee0 100644
--- a/modulefiles/tasks/hera/aqm_ics.local.lua
+++ b/modulefiles/tasks/hera/aqm_ics.local.lua
@@ -1,2 +1,2 @@
-load("python_srw_cmaq")
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
+load("python_srw_aqm")
+load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6"))
diff --git a/modulefiles/tasks/hera/aqm_lbcs.local.lua b/modulefiles/tasks/hera/aqm_lbcs.local.lua
index 0e7132d749..2eb2ea2ee0 100644
--- a/modulefiles/tasks/hera/aqm_lbcs.local.lua
+++ b/modulefiles/tasks/hera/aqm_lbcs.local.lua
@@ -1,2 +1,2 @@
-load("python_srw_cmaq")
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
+load("python_srw_aqm")
+load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6"))
diff --git a/modulefiles/tasks/hera/fire_emission.local.lua b/modulefiles/tasks/hera/fire_emission.local.lua
index 8aa737aa65..68d6f14832 100644
--- a/modulefiles/tasks/hera/fire_emission.local.lua
+++ b/modulefiles/tasks/hera/fire_emission.local.lua
@@ -1,3 +1,3 @@
load("hpss")
-load("python_srw_cmaq")
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
+load("python_srw_aqm")
+load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6"))
diff --git a/modulefiles/tasks/hera/nexus_emission.local.lua b/modulefiles/tasks/hera/nexus_emission.local.lua
index c7ac9dcb90..d1f95e6d31 100644
--- a/modulefiles/tasks/hera/nexus_emission.local.lua
+++ b/modulefiles/tasks/hera/nexus_emission.local.lua
@@ -1,2 +1,2 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("python_srw_cmaq")
+load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6"))
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/hera/nexus_post_split.local.lua b/modulefiles/tasks/hera/nexus_post_split.local.lua
index 0e7132d749..2eb2ea2ee0 100644
--- a/modulefiles/tasks/hera/nexus_post_split.local.lua
+++ b/modulefiles/tasks/hera/nexus_post_split.local.lua
@@ -1,2 +1,2 @@
-load("python_srw_cmaq")
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
+load("python_srw_aqm")
+load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6"))
diff --git a/modulefiles/tasks/hera/point_source.local.lua b/modulefiles/tasks/hera/point_source.local.lua
index 89feda226c..df0e35d5da 100644
--- a/modulefiles/tasks/hera/point_source.local.lua
+++ b/modulefiles/tasks/hera/point_source.local.lua
@@ -1 +1 @@
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/hera/pre_post_stat.local.lua b/modulefiles/tasks/hera/pre_post_stat.local.lua
index 23370a8d60..ede4c61606 100644
--- a/modulefiles/tasks/hera/pre_post_stat.local.lua
+++ b/modulefiles/tasks/hera/pre_post_stat.local.lua
@@ -1 +1 @@
-load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
+load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6"))
diff --git a/modulefiles/tasks/hercules/aqm_ics.local.lua b/modulefiles/tasks/hercules/aqm_ics.local.lua
index c7ac9dcb90..2aac950d8d 100644
--- a/modulefiles/tasks/hercules/aqm_ics.local.lua
+++ b/modulefiles/tasks/hercules/aqm_ics.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/hercules/fire_emission.local.lua b/modulefiles/tasks/hercules/fire_emission.local.lua
index c7ac9dcb90..2aac950d8d 100644
--- a/modulefiles/tasks/hercules/fire_emission.local.lua
+++ b/modulefiles/tasks/hercules/fire_emission.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/hercules/nexus_emission.local.lua b/modulefiles/tasks/hercules/nexus_emission.local.lua
index c7ac9dcb90..2aac950d8d 100644
--- a/modulefiles/tasks/hercules/nexus_emission.local.lua
+++ b/modulefiles/tasks/hercules/nexus_emission.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/hercules/nexus_post_split.local.lua b/modulefiles/tasks/hercules/nexus_post_split.local.lua
index c7ac9dcb90..2aac950d8d 100644
--- a/modulefiles/tasks/hercules/nexus_post_split.local.lua
+++ b/modulefiles/tasks/hercules/nexus_post_split.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/hercules/point_source.local.lua b/modulefiles/tasks/hercules/point_source.local.lua
index 89feda226c..df0e35d5da 100644
--- a/modulefiles/tasks/hercules/point_source.local.lua
+++ b/modulefiles/tasks/hercules/point_source.local.lua
@@ -1 +1 @@
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/orion/aqm_ics.local.lua b/modulefiles/tasks/orion/aqm_ics.local.lua
index c7ac9dcb90..2aac950d8d 100644
--- a/modulefiles/tasks/orion/aqm_ics.local.lua
+++ b/modulefiles/tasks/orion/aqm_ics.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/orion/fire_emission.local.lua b/modulefiles/tasks/orion/fire_emission.local.lua
index c7ac9dcb90..2aac950d8d 100644
--- a/modulefiles/tasks/orion/fire_emission.local.lua
+++ b/modulefiles/tasks/orion/fire_emission.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/orion/nexus_emission.local.lua b/modulefiles/tasks/orion/nexus_emission.local.lua
index c7ac9dcb90..2aac950d8d 100644
--- a/modulefiles/tasks/orion/nexus_emission.local.lua
+++ b/modulefiles/tasks/orion/nexus_emission.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/orion/nexus_post_split.local.lua b/modulefiles/tasks/orion/nexus_post_split.local.lua
index c7ac9dcb90..2aac950d8d 100644
--- a/modulefiles/tasks/orion/nexus_post_split.local.lua
+++ b/modulefiles/tasks/orion/nexus_post_split.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/modulefiles/tasks/orion/point_source.local.lua b/modulefiles/tasks/orion/point_source.local.lua
index 89feda226c..df0e35d5da 100644
--- a/modulefiles/tasks/orion/point_source.local.lua
+++ b/modulefiles/tasks/orion/point_source.local.lua
@@ -1 +1 @@
-load("python_srw_cmaq")
+load("python_srw_aqm")
diff --git a/parm/aqm.rc b/parm/aqm.rc
index 3d2ad32711..4ffaf5095e 100644
--- a/parm/aqm.rc
+++ b/parm/aqm.rc
@@ -7,14 +7,14 @@
#
# General settings
#
-ae_matrix_nml: {{ aqm_config_dir }}/AE_cb6r3_ae6_aq.nml
-gc_matrix_nml: {{ aqm_config_dir }}/GC_cb6r3_ae6_aq.nml
-nr_matrix_nml: {{ aqm_config_dir }}/NR_cb6r3_ae6_aq.nml
-tr_matrix_nml: {{ aqm_config_dir }}/Species_Table_TR_0.nml
+ae_matrix_nml: {{ fixaqm }}/epa/AE_cb6r3_ae6_aq.nml
+gc_matrix_nml: {{ fixaqm }}/epa/GC_cb6r3_ae6_aq.nml
+nr_matrix_nml: {{ fixaqm }}/epa/NR_cb6r3_ae6_aq.nml
+tr_matrix_nml: {{ fixaqm }}/epa/Species_Table_TR_0.nml
-csqy_data: {{ aqm_config_dir }}/CSQY_DATA_cb6r3_ae6_aq
-optics_data: {{ aqm_config_dir }}/PHOT_OPTICS.dat
-omi_data: {{ aqm_config_dir }}/omi_cmaq_2015_361X179.dat
+csqy_data: {{ fixaqm }}/epa/CSQY_DATA_cb6r3_ae6_aq
+optics_data: {{ fixaqm }}/epa/PHOT_OPTICS.dat
+omi_data: {{ fixaqm }}/epa/omi_cmaq_2015_361X179.dat
init_concentrations: {{ init_concentrations | lower }}
@@ -172,7 +172,7 @@ bio_format: netcdf
bio_file: {{ aqm_rc_bio_file_fp }}
bio_frequency: static
bio_period: summer
-bio_speciation_file: {{ dcominbio }}/gspro_biogenics_1mar2017.txt
+bio_speciation_file: {{ fixaqm }}/bio/gspro_biogenics_1mar2017.txt
bio_speciation_profile: B10C6
bio_species::
AVG_NOAG_GROW 1.00000 AVG_NOAG_GROW gmN/hr
diff --git a/parm/wflow/aqm_post.yaml b/parm/wflow/aqm_post.yaml
index 31b7b34848..5f307184d3 100644
--- a/parm/wflow/aqm_post.yaml
+++ b/parm/wflow/aqm_post.yaml
@@ -5,7 +5,7 @@ default_aqm_task: &default_aqm
maxtries: '2'
envars: &default_vars
GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;'
- USHdir: '&USHdir;'
+ HOMEdir: '&HOMEdir;'
PDY: !cycstr "@Y@m@d"
cyc: !cycstr "@H"
nprocs: '{{ parent.nnodes * parent.ppn // 1 }}'
@@ -22,21 +22,21 @@ default_aqm_task: &default_aqm
task_pre_post_stat:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "pre_post_stat" "&JOBSdir;/JREGIONAL_PRE_POST_STAT"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "pre_post_stat" "&HOMEdir;/jobs/JSRW_PRE_POST_STAT"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
dependency:
or:
datadep:
attrs:
age: 00:00:00:05
- text: !cycstr '&COMIN_DIR;/post_@Y@m@d@H_task_complete.txt'
+ text: !cycstr '&DATAROOT;/DATA_SHARE/@Y@m@d@H/post_@Y@m@d@H_task_complete.txt'
metataskdep:
attrs:
metatask: run_ens_post
task_post_stat_o3:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_o3" "&JOBSdir;/JREGIONAL_POST_STAT_O3"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_o3" "&HOMEdir;/jobs/JSRW_POST_STAT_O3"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
memory: 120G
dependency:
@@ -46,7 +46,7 @@ task_post_stat_o3:
task_post_stat_pm25:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_pm25" "&JOBSdir;/JREGIONAL_POST_STAT_PM25"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_pm25" "&HOMEdir;/jobs/JSRW_POST_STAT_PM25"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
memory: 120G
dependency:
@@ -56,7 +56,7 @@ task_post_stat_pm25:
task_bias_correction_o3:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_o3" "&JOBSdir;/JREGIONAL_BIAS_CORRECTION_O3"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_o3" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_O3"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
memory: 120G
dependency:
@@ -66,7 +66,7 @@ task_bias_correction_o3:
task_bias_correction_pm25:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_pm25" "&JOBSdir;/JREGIONAL_BIAS_CORRECTION_PM25"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_pm25" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_PM25"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
memory: 120G
dependency:
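
Before any of these commands run, Rocoto expands the &ENTITY; references textually, so each task ends up invoking the module-loading wrapper with the task name and the new JSRW job card. A minimal sketch of the resulting call for the pre_post_stat task, using hypothetical paths (the wrapper location and HOMEdir value below are illustrative, not taken from this diff):

    # Sketch only: what the expanded Rocoto <command> amounts to at submit time.
    LOAD_MODULES_RUN_TASK_FP="/hypothetical/expt_dir/load_modules_run_task.sh"   # assumed value
    HOMEdir="/hypothetical/ufs-srweather-app"                                    # assumed value
    "${LOAD_MODULES_RUN_TASK_FP}" "pre_post_stat" "${HOMEdir}/jobs/JSRW_PRE_POST_STAT"
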
diff --git a/parm/wflow/aqm_prep.yaml b/parm/wflow/aqm_prep.yaml
index 6cfab161d7..d8f01d2c82 100644
--- a/parm/wflow/aqm_prep.yaml
+++ b/parm/wflow/aqm_prep.yaml
@@ -5,12 +5,19 @@ default_aqm_task: &default_aqm
maxtries: '2'
envars: &default_vars
GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;'
- USHdir: '&USHdir;'
+ HOMEdir: '&HOMEdir;'
+ envir: '&envir;'
+ model_ver: '&model_ver;'
+ KEEPDATA: '&KEEPDATA;'
+ SENDCOM: '&SENDCOM;'
+ COMROOT: '&COMROOT;'
+ DATAROOT: '&DATAROOT;'
+ DCOMROOT: '&DCOMROOT;'
+ LOGDIR: !cycstr "&LOGDIR;"
PDY: !cycstr "@Y@m@d"
cyc: !cycstr "@H"
nprocs: '{{ parent.nnodes * parent.ppn // 1 }}'
subcyc: !cycstr "@M"
- LOGDIR: !cycstr "&LOGDIR;"
SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;'
native: '{{ platform.SCHED_NATIVE_CMD }}'
nnodes: 1
@@ -22,7 +29,7 @@ default_aqm_task: &default_aqm
task_nexus_gfs_sfc:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_gfs_sfc" "&JOBSdir;/JREGIONAL_NEXUS_GFS_SFC"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_gfs_sfc" "&HOMEdir;/jobs/JSRW_NEXUS_GFS_SFC"'
native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}'
partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
@@ -46,9 +53,9 @@ metatask_nexus_emission:
nspt: '{% for h in range(0, cpl_aqm_parm.NUM_SPLIT_NEXUS) %}{{ " %02d" % h }}{% endfor %}'
task_nexus_emission_#nspt#:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_emission" "&JOBSdir;/JREGIONAL_NEXUS_EMISSION"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_emission" "&HOMEdir;/jobs/JSRW_NEXUS_EMISSION"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
- nnodes: 4
+ nnodes: '{{ task_nexus_emission.NNODES_NEXUS_EMISSION }}'
ppn: '{{ task_nexus_emission.PPN_NEXUS_EMISSION // 1 }}'
walltime: 01:00:00
envars:
@@ -61,7 +68,7 @@ metatask_nexus_emission:
task_nexus_post_split:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_post_split" "&JOBSdir;/JREGIONAL_NEXUS_POST_SPLIT"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_post_split" "&HOMEdir;/jobs/JSRW_NEXUS_POST_SPLIT"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
dependency:
metataskdep:
@@ -70,13 +77,13 @@ task_nexus_post_split:
task_fire_emission:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "fire_emission" "&JOBSdir;/JREGIONAL_FIRE_EMISSION"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "fire_emission" "&HOMEdir;/jobs/JSRW_FIRE_EMISSION"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
memory: 2G
task_point_source:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "point_source" "&JOBSdir;/JREGIONAL_POINT_SOURCE"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "point_source" "&HOMEdir;/jobs/JSRW_POINT_SOURCE"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
walltime: 01:00:00
dependency:
@@ -94,7 +101,7 @@ task_aqm_ics_ext:
attrs:
cycledefs: at_start
maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&JOBSdir;/JREGIONAL_AQM_ICS"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"'
envars:
<<: *default_vars
PREV_CYCLE_DIR: '&WARMSTART_CYCLE_DIR;'
@@ -119,7 +126,7 @@ task_aqm_ics:
attrs:
cycledefs: cycled_from_second
maxtries: '2'
- command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&JOBSdir;/JREGIONAL_AQM_ICS"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"'
envars:
<<: *default_vars
PREV_CYCLE_DIR: '&COMIN_DIR;'
@@ -137,11 +144,11 @@ task_aqm_ics:
datadep_tracer:
attrs:
age: 00:00:00:05
- text: &COMIN_DIR;/RESTART/fv_tracer.res.tile1.nc
+ text: '&COMIN_DIR;/RESTART/fv_tracer.res.tile1.nc'
task_aqm_lbcs:
<<: *default_aqm
- command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_lbcs" "&JOBSdir;/JREGIONAL_AQM_LBCS"'
+ command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_lbcs" "&HOMEdir;/jobs/JSRW_AQM_LBCS"'
join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
ppn: 24
dependency:
diff --git a/parm/wflow/default_workflow.yaml b/parm/wflow/default_workflow.yaml
index b70ad8dbb2..c79415b3be 100644
--- a/parm/wflow/default_workflow.yaml
+++ b/parm/wflow/default_workflow.yaml
@@ -5,18 +5,18 @@ rocoto:
entities:
ACCOUNT: '{{ user.ACCOUNT }}'
CCPA_OBS_DIR: '{{ platform.CCPA_OBS_DIR }}'
- COMIN_DIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/{}.@Y@m@d/@H".format(nco.COMIN_BASEDIR,nco.RUN_default)}}{% else %}{{"{}/@Y@m@d@H".format(workflow.EXPTDIR)}}{% endif %}'
+ COLDSTART: '{{ workflow.COLDSTART }}'
COMINgfs: '{{ platform.get("COMINgfs") }}'
- FCST_DIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/run_fcst_mem#mem#.{}_@Y@m@d@H".format(nco.DATAROOT_default,workflow.WORKFLOW_ID)}}{% else %}{{"{}/@Y@m@d@H".format(workflow.EXPTDIR)}}{% endif %}'
GLOBAL_VAR_DEFNS_FP: '{{ workflow.GLOBAL_VAR_DEFNS_FP }}'
+ HOMEdir: '{{ user.HOMEdir }}'
JOBSdir: '{{ user.JOBSdir }}'
+ KEEPDATA: '{{ nco.KEEPDATA_default }}'
LOAD_MODULES_RUN_TASK_FP: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }}'
- LOGDIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/@Y@m@d".format(nco.LOGBASEDIR_default)}}{% else %}{{nco.LOGBASEDIR_default }}{% endif %}'
- LOGEXT: '{% if user.RUN_ENVIR == "nco" %}{{".{}.log".format(workflow.WORKFLOW_ID)}}{% else %}{{".log"}}{% endif %}'
+ LOGEXT: ".log"
+ NET: '{{ nco.NET_default }}'
MRMS_OBS_DIR: '{{ platform.MRMS_OBS_DIR }}'
NCORES_PER_NODE: '{{ platform.NCORES_PER_NODE }}'
NDAS_OBS_DIR: '{{ platform.NDAS_OBS_DIR }}'
- NET: '{{ nco.NET_default }}'
NOHRSC_OBS_DIR: '{{ platform.NOHRSC_OBS_DIR }}'
PARTITION_DEFAULT: '{{ platform.get("PARTITION_DEFAULT") }}'
PARTITION_FCST: '{{ platform.get("PARTITION_FCST") }}'
@@ -26,11 +26,20 @@ rocoto:
QUEUE_HPSS: '{{ platform.get("QUEUE_HPSS") }}'
RUN: '{{ nco.RUN_default }}'
SCRIPTSdir: '{{ user.SCRIPTSdir }}'
+ SENDCOM: '{{ nco.SENDCOM_default }}'
SLASH_ENSMEM_SUBDIR: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% else %}{{ "/" }}{% endif %}'
USHdir: '{{ user.USHdir }}'
- COLDSTART: '{{ workflow.COLDSTART }}'
WARMSTART_CYCLE_DIR: '{{ workflow.WARMSTART_CYCLE_DIR }}'
WORKFLOW_ID: '{{ workflow.WORKFLOW_ID }}'
+
+ envir: '{{ nco.envir_default }}'
+ model_ver: '{{ nco.model_ver_default }}'
+ COMROOT: '{{ nco.PTMP }}/&envir;/com'
+ DATAROOT: '{{ nco.PTMP }}/&envir;/tmp'
+ DCOMROOT: '{{ nco.PTMP }}/&envir;/dcom'
+ COMIN_DIR: '{% if user.RUN_ENVIR == "nco" %}&COMROOT;/&NET;/&model_ver;/&RUN;.@Y@m@d/@H{% else %}{{ workflow.EXPTDIR }}/@Y@m@d@H{% endif %}'
+ FCST_DIR: '{% if user.RUN_ENVIR == "nco" %}&DATAROOT;/run_fcst_mem#mem#_@Y@m@d@H{% else %}{{ workflow.EXPTDIR }}/@Y@m@d@H{% endif %}'
+ LOGDIR: '{% if user.RUN_ENVIR == "nco" %}&COMROOT;/output/logs/@Y@m@d{% else %}{{ workflow.EXPTDIR }}/log{% endif %}'
attrs:
cyclethrottle: "200"
realtime: "F"
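
With these entities, every NCO-style location hangs off a single {{ nco.PTMP }}/&envir; tree, and COMIN_DIR, FCST_DIR, and LOGDIR are assembled from the same pieces. A short sketch of how the paths compose under the RUN_ENVIR=nco branch, using made-up values for PTMP, NET, model_ver, RUN, and the cycle:

    # Sketch: how the COMROOT/DATAROOT/DCOMROOT and COMIN_DIR/LOGDIR entities nest.
    PTMP="/hypothetical/ptmp"; envir="para"          # assumed values
    NET="aqm"; model_ver="v7.0"; RUN="aqm"           # assumed values
    PDY="20240118"; cyc="12"

    COMROOT="${PTMP}/${envir}/com"
    DATAROOT="${PTMP}/${envir}/tmp"
    DCOMROOT="${PTMP}/${envir}/dcom"

    COMIN_DIR="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}"   # RUN_ENVIR=nco branch
    LOGDIR="${COMROOT}/output/logs/${PDY}"

    echo "${COMIN_DIR}"   # /hypothetical/ptmp/para/com/aqm/v7.0/aqm.20240118/12
    echo "${LOGDIR}"      # /hypothetical/ptmp/para/com/output/logs/20240118
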
diff --git a/scripts/exregional_fire_emission.sh b/scripts/exregional_fire_emission.sh
deleted file mode 100755
index ef1b4e291d..0000000000
--- a/scripts/exregional_fire_emission.sh
+++ /dev/null
@@ -1,198 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP}
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the ex-script for the task that fetches fire emission
-data files from disk or generates model-ready RAVE emission file from raw
-data files.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set up variables for call to retrieve_data.py
-#
-#-----------------------------------------------------------------------
-#
-yyyymmdd=${FIRE_FILE_CDATE:0:8}
-hh=${FIRE_FILE_CDATE:8:2}
-
-CDATE_mh1=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - 1 hours" "+%Y%m%d%H" )
-
-yyyymmdd_mh1=${CDATE_mh1:0:8}
-hh_mh1=${CDATE_mh1:8:2}
-#
-#-----------------------------------------------------------------------
-#
-# Retrieve fire file to FIRE_EMISSION_STAGING_DIR
-#
-#-----------------------------------------------------------------------
-#
-aqm_fire_file_fn="${AQM_FIRE_FILE_PREFIX}_${yyyymmdd}_t${hh}z${AQM_FIRE_FILE_SUFFIX}"
-
-# Check if the fire file exists in the designated directory
-if [ -e "${DCOMINfire}/${aqm_fire_file_fn}" ]; then
- cp_vrfy "${DCOMINfire}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}"
-else
- # Copy raw data
- for ihr in {0..23}; do
- download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_mh1} ${hh_mh1} UTC - $ihr hours" "+%Y%m%d%H" )
- FILE_13km="Hourly_Emissions_13km_${download_time}00_${download_time}00.nc"
- yyyymmdd_dn=${download_time:0:8}
- hh_dn=${download_time:8:2}
- missing_download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_dn} ${hh_dn} UTC - 24 hours" "+%Y%m%d%H" )
- yyyymmdd_dn_md1=${missing_download_time:0:8}
- FILE_13km_md1=Hourly_Emissions_13km_${missing_download_time}00_${missing_download_time}00.nc
- if [ -e "${DCOMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}" ]; then
- cp_vrfy "${DCOMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}" .
- elif [ -e "${DCOMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}" ]; then
- echo "WARNING: ${FILE_13km} does not exist. Replacing with the file of previous date ..."
- cp_vrfy "${DCOMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}" "${FILE_13km}"
- else
- message_txt="Fire Emission RAW data does not exist:
- FILE_13km_md1 = \"${FILE_13km_md1}\"
- DCOMINfire = \"${DCOMINfire}\""
-
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- cp_vrfy "${DCOMINfire}/Hourly_Emissions_13km_dummy.nc" "${FILE_13km}"
- message_warning="WARNING: ${message_txt}. Replacing with the dummy file :: AQM RUN SOFT FAILED."
- print_info_msg "${message_warning}"
- if [ ! -z "${maillist}" ]; then
- echo "${message_warning}" | mail.py $maillist
- fi
- else
- print_err_msg_exit "${message_txt}"
- fi
- fi
- done
-
- ncks -O -h --mk_rec_dmn time Hourly_Emissions_13km_${download_time}00_${download_time}00.nc temp.nc
- export err=$?
- if [ $err -ne 0 ]; then
- message_txt="Call to NCKS returned with nonzero exit code."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
- fi
-
- mv_vrfy temp.nc Hourly_Emissions_13km_${download_time}00_${download_time}00.nc
-
- ncrcat -h Hourly_Emissions_13km_*.nc Hourly_Emissions_13km_${yyyymmdd}0000_${yyyymmdd}2300.t${cyc}z.nc
- export err=$?
- if [ $err -ne 0 ]; then
- message_txt="Call to NCRCAT returned with nonzero exit code."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
- fi
-
- input_fire="${DATA}/Hourly_Emissions_13km_${yyyymmdd}0000_${yyyymmdd}2300.t${cyc}z.nc"
- output_fire="${DATA}/Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_new24.t${cyc}z.nc"
-
- python3 ${HOMEdir}/sorc/AQM-utils/python_utils/RAVE_remake.allspecies.aqmna13km.g793.py --date "${yyyymmdd}" --cyc "${hh}" --input_fire "${input_fire}" --output_fire "${output_fire}"
- export err=$?
- if [ $err -ne 0 ]; then
- message_txt="Call to python script \"RAVE_remake.allspecies.py\" returned with nonzero exit code."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
- fi
-
- ncks --mk_rec_dmn Time Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_new24.t${cyc}z.nc -o Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc
- export err=$?
- if [ $err -ne 0 ]; then
- message_txt="Call to NCKS returned with nonzero exit code."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
- fi
-
- ncrcat Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc ${aqm_fire_file_fn}
- export err=$?
- if [ $err -ne 0 ]; then
- message_txt="Call to NCRCAT returned with nonzero exit code."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
- fi
-
- # Copy the final fire emission file to STAGING_DIR
- cp_vrfy "${DATA}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}"
-
- # Archive the final fire emission file to disk and HPSS
- if [ "${DO_AQM_SAVE_FIRE}" = "TRUE" ]; then
- cp "${DATA}/${aqm_fire_file_fn}" ${DCOMINfire}
-
- hsi_log_fn="log.hsi_put.${yyyymmdd}_${hh}"
- hsi put ${aqm_fire_file_fn} : ${AQM_FIRE_ARCHV_DIR}/${aqm_fire_file_fn} >& ${hsi_log_fn}
- export err=$?
- if [ $err -ne 0 ]; then
- message_txt="htar file writing operation (\"hsi put ...\") failed. Check the log
-file hsi_log_fn in the DATA directory for details:
- DATA = \"${DATA}\"
- hsi_log_fn = \"${hsi_log_fn}\""
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
- fi
- fi
-fi
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
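
The removed ex-script's NetCDF handling (the task itself now points at the JSRW_FIRE_EMISSION job card in parm/wflow/aqm_prep.yaml) leaned on a standard NCO-toolkit pattern: promote time to a record dimension with ncks, then concatenate the hourly files along it with ncrcat. A minimal sketch of that pattern with hypothetical file names:

    # Sketch only (hypothetical file names); needs the NCO utilities ncks and ncrcat.
    for f in Hourly_Emissions_13km_*.nc; do
      # promote "time" to a record (unlimited) dimension so files can be concatenated along it
      ncks -O -h --mk_rec_dmn time "${f}" "tmp_${f}"
      mv "tmp_${f}" "${f}"
    done
    # join the hourly files into a single daily file along the record dimension
    ncrcat -h Hourly_Emissions_13km_*.nc Hourly_Emissions_daily.nc
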
diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh
index 60852095ee..0fd6b0884d 100755
--- a/scripts/exregional_make_ics.sh
+++ b/scripts/exregional_make_ics.sh
@@ -643,15 +643,23 @@ POST_STEP
#
#-----------------------------------------------------------------------
#
-mv_vrfy out.atm.tile${TILE_RGNL}.nc \
- ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc
-
-mv_vrfy out.sfc.tile${TILE_RGNL}.nc \
- ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc
-
-mv_vrfy gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc
-
-mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc
+if [ "${CPL_AQM}" = "TRUE" ]; then
+ COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later
+ if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then
+ data_trans_path="${COMOUT}"
+ else
+ data_trans_path="${DATA_SHARE}"
+ fi
+ cpreq -p out.atm.tile${TILE_RGNL}.nc "${data_trans_path}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
+ cpreq -p out.sfc.tile${TILE_RGNL}.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc"
+ cpreq -p gfs_ctrl.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc"
+ cpreq -p gfs.bndy.nc "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc"
+else
+ mv_vrfy out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc
+ mv_vrfy out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc
+ mv_vrfy gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc
+ mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc
+fi
#
#-----------------------------------------------------------------------
#
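
The cold-start test added here is a plain string comparison: ${PDY}${cyc} is ten characters (YYYYMMDDHH) and is checked against the first ten characters of DATE_FIRST_CYCL. A tiny sketch with hypothetical values:

    # Sketch with hypothetical values: the comparison is purely lexical.
    DATE_FIRST_CYCL="202401181200"   # hypothetical; the :0:10 slice keeps only YYYYMMDDHH
    PDY="20240118"; cyc="12"; COLDSTART="TRUE"

    if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then
      echo "first cycle of a cold start -> write ICs directly to COMOUT"
    else
      echo "later cycle -> stage ICs in DATA_SHARE for the aqm_ics task"
    fi
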
diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh
index fcde8e6f46..3a7f586051 100755
--- a/scripts/exregional_make_lbcs.sh
+++ b/scripts/exregional_make_lbcs.sh
@@ -559,7 +559,11 @@ located in the following directory:
lbc_spec_fhrs=( "${EXTRN_MDL_FHRS[$i]}" )
fcst_hhh=$(( ${lbc_spec_fhrs} - ${EXTRN_MDL_LBCS_OFFSET_HRS} ))
fcst_hhh_FV3LAM=$( printf "%03d" "$fcst_hhh" )
- mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
+ if [ "${CPL_AQM}" = "TRUE" ]; then
+ cpreq -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
+ else
+ mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
+ fi
fi
done
diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh
index 723086b077..0013fad47d 100755
--- a/scripts/exregional_run_fcst.sh
+++ b/scripts/exregional_run_fcst.sh
@@ -233,42 +233,59 @@ cd_vrfy ${DATA}/INPUT
#
relative_link_flag="FALSE"
-target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
-symlink="gfs_data.nc"
-create_symlink_to_file target="$target" symlink="$symlink" \
- relative="${relative_link_flag}"
+if [ "${CPL_AQM}" = "TRUE" ]; then
+ COMIN="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later
-target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc"
-symlink="sfc_data.nc"
-create_symlink_to_file target="$target" symlink="$symlink" \
- relative="${relative_link_flag}"
+ target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
+ symlink="gfs_data.nc"
+ create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}"
-target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc"
-symlink="gfs_ctrl.nc"
-create_symlink_to_file target="$target" symlink="$symlink" \
- relative="${relative_link_flag}"
+ target="${COMIN}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc"
+ symlink="sfc_data.nc"
+ create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}"
+
+ target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc"
+ symlink="gfs_ctrl.nc"
+ create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}"
+
+ for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do
+ target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc"
+ symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc"
+ create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}"
+ done
+ target="${COMIN}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc"
+ symlink="NEXUS_Expt.nc"
+ create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}"
+
+ # create symlink to PT for point source in SRW-AQM
+ target="${COMIN}/${NET}.${cycle}${dot_ensmem}.PT.nc"
+ if [ -f ${target} ]; then
+ symlink="PT.nc"
+ create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}"
+ fi
+else
+ target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
+ symlink="gfs_data.nc"
+ create_symlink_to_file target="$target" symlink="$symlink" \
+ relative="${relative_link_flag}"
-for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do
- target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc"
- symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc"
+ target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc"
+ symlink="sfc_data.nc"
create_symlink_to_file target="$target" symlink="$symlink" \
- relative="${relative_link_flag}"
-done
+ relative="${relative_link_flag}"
-if [ "${CPL_AQM}" = "TRUE" ]; then
- target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc"
- symlink="NEXUS_Expt.nc"
+ target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc"
+ symlink="gfs_ctrl.nc"
create_symlink_to_file target="$target" symlink="$symlink" \
relative="${relative_link_flag}"
- # create symlink to PT for point source in Online-CMAQ
- target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.PT.nc"
- if [ -f ${target} ]; then
- symlink="PT.nc"
+ for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do
+ target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc"
+ symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc"
create_symlink_to_file target="$target" symlink="$symlink" \
- relative="${relative_link_flag}"
- fi
+ relative="${relative_link_flag}"
+ done
fi
#
#-----------------------------------------------------------------------
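
Both branches of the linking block generate zero-padded forecast hours with seq -f "%03g" at the LBC interval; a standalone sketch of that loop with assumed values:

    # Sketch: enumerate zero-padded boundary hours the way the linking loop above does.
    LBC_SPEC_INTVL_HRS=6; FCST_LEN_HRS=24          # assumed values
    NET="aqm"; cycle="t12z"; dot_ensmem=""; TILE_RGNL=7
    for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do
      echo "${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc -> gfs_bndy.tile${TILE_RGNL}.${fhr}.nc"
    done
    # prints the f000, f006, f012, f018, f024 targets and their symlink names
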
diff --git a/scripts/exregional_aqm_ics.sh b/scripts/exsrw_aqm_ics.sh
similarity index 68%
rename from scripts/exregional_aqm_ics.sh
rename to scripts/exsrw_aqm_ics.sh
index 676cc4ed90..9104374705 100755
--- a/scripts/exregional_aqm_ics.sh
+++ b/scripts/exsrw_aqm_ics.sh
@@ -7,7 +7,7 @@
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
+. ${USHsrw}/source_util_funcs.sh
source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
@@ -17,7 +17,7 @@ source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
@@ -53,31 +53,31 @@ tial or boundary condition files for the FV3 will be generated.
#
#-----------------------------------------------------------------------
#
-rst_dir=${PREV_CYCLE_DIR}/RESTART
-rst_file=fv_tracer.res.tile1.nc
-fv_tracer_file=${rst_dir}/${PDY}.${cyc}0000.${rst_file}
-print_info_msg "
- Looking for tracer restart file: \"${fv_tracer_file}\""
+rst_dir="${PREV_CYCLE_DIR}/RESTART"
+rst_file="fv_tracer.res.tile1.nc"
+fv_tracer_file="${rst_dir}/${PDY}.${cyc}0000.${rst_file}"
+print_info_msg "Looking for tracer restart file: \"${fv_tracer_file}\""
if [ ! -r ${fv_tracer_file} ]; then
if [ -r ${rst_dir}/coupler.res ]; then
rst_info=( $( tail -n 1 ${rst_dir}/coupler.res ) )
- rst_date=$( printf "%04d%02d%02d%02d" ${rst_info[@]:0:4} )
+ # Remove leading zeros from ${rst_info[1]}
+ month="${rst_info[1]#"${rst_info[1]%%[!0]*}"}"
+ # Remove leading zeros from ${rst_info[2]}
+ day="${rst_info[2]#"${rst_info[2]%%[!0]*}"}"
+ # Format the date without leading zeros
+ rst_date=$(printf "%04d%02d%02d%02d" ${rst_info[0]} $((10#$month)) $((10#$day)) ${rst_info[3]})
print_info_msg "
Tracer file not found. Checking available restart date:
requested date: \"${PDY}${cyc}\"
available date: \"${rst_date}\""
if [ "${rst_date}" = "${PDY}${cyc}" ] ; then
- fv_tracer_file=${rst_dir}/${rst_file}
+ fv_tracer_file="${rst_dir}/${rst_file}"
if [ -r ${fv_tracer_file} ]; then
- print_info_msg "
- Tracer file found: \"${fv_tracer_file}\""
+ print_info_msg "Tracer file found: \"${fv_tracer_file}\""
else
- message_txt="No suitable tracer restart file found."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2"]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ message_txt="FATAL ERROR No suitable tracer restart file ${rst_dir}/${rst_file} found."
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
fi
fi
@@ -88,46 +88,44 @@ fi
# Add air quality tracer variables from previous cycle's restart output
# to atmosphere's initial condition file according to the steps below:
#
-# a. Python script to manipulate the files (see comments inside for
-# details)
+# a. Python script to manipulate the files (see comments inside for details)
# b. Remove checksum attribute to prevent overflow
-#
# c. Rename resulting file as the expected atmospheric IC file
#
#-----------------------------------------------------------------------
#
-gfs_ic_file=${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc
-wrk_ic_file=${DATA}/gfs.nc
+gfs_ic_fn="${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
+gfs_ic_fp="${DATA_SHARE}/${gfs_ic_fn}"
+wrk_ic_fp="${DATA}/gfs.nc"
print_info_msg "
Adding air quality tracers to atmospheric initial condition file:
tracer file: \"${fv_tracer_file}\"
- FV3 IC file: \"${gfs_ic_file}\""
+ FV3 IC file: \"${gfs_ic_fp}\""
-cp_vrfy ${gfs_ic_file} ${wrk_ic_file}
-python3 ${HOMEdir}/sorc/AQM-utils/python_utils/add_aqm_ics.py --fv_tracer_file "${fv_tracer_file}" --wrk_ic_file "${wrk_ic_file}"
+cpreq ${gfs_ic_fp} ${wrk_ic_fp}
+${USHsrw}/aqm_utils_python/add_aqm_ics.py --fv_tracer_file "${fv_tracer_file}" --wrk_ic_file "${wrk_ic_fp}"
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to python script \"add_aqm_ics.py\" failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
ncatted -a checksum,,d,s, tmp1.nc
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to NCATTED returned with nonzero exit code."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
-cp_vrfy tmp1.nc ${gfs_ic_file}
+mv tmp1.nc ${gfs_ic_fn}
+
+cpreq -p ${gfs_ic_fn} ${COMOUT}
+cpreq -p "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" ${COMOUT}
+cpreq -p "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" ${COMOUT}
+cpreq -p "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc" ${COMOUT}
unset fv_tracer_file
unset wrk_ic_file
@@ -138,20 +136,17 @@ unset wrk_ic_file
#
#-----------------------------------------------------------------------
#
- print_info_msg "
+print_info_msg "
========================================================================
-Successfully added air quality tracers to atmospheric initial condition
-file!!!
+Successfully added air quality tracers to atmospheric IC file!!!
Exiting script: \"${scrfunc_fn}\"
In directory: \"${scrfunc_dir}\"
========================================================================"
-
#
#-----------------------------------------------------------------------
#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
+# Restore the shell options saved at the beginning of this script/function.
#
#-----------------------------------------------------------------------
#
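
The new restart-date handling strips leading zeros with parameter expansion and then forces base-10 evaluation with 10#, which keeps bash from rejecting 08 or 09 as malformed octal. A self-contained sketch of the same idiom, with made-up coupler.res fields:

    # Sketch: rebuild YYYYMMDDHH from coupler.res-style fields without octal pitfalls.
    rst_info=(2024 01 08 12 00 00)                     # hypothetical "tail -n 1 coupler.res" fields
    month="${rst_info[1]#"${rst_info[1]%%[!0]*}"}"     # strip leading zeros -> "1"
    day="${rst_info[2]#"${rst_info[2]%%[!0]*}"}"       # strip leading zeros -> "8"
    rst_date=$(printf "%04d%02d%02d%02d" ${rst_info[0]} $((10#$month)) $((10#$day)) ${rst_info[3]})
    echo "${rst_date}"                                 # 2024010812
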
diff --git a/scripts/exregional_aqm_lbcs.sh b/scripts/exsrw_aqm_lbcs.sh
similarity index 67%
rename from scripts/exregional_aqm_lbcs.sh
rename to scripts/exsrw_aqm_lbcs.sh
index 09a33d40a2..f6d932962e 100755
--- a/scripts/exregional_aqm_lbcs.sh
+++ b/scripts/exsrw_aqm_lbcs.sh
@@ -7,7 +7,7 @@
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
+. ${USHsrw}/source_util_funcs.sh
source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
@@ -17,7 +17,7 @@ source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aq
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
@@ -77,10 +77,10 @@ fi
#
#-----------------------------------------------------------------------
#
-CDATE_MOD=$( $DATE_UTIL --utc --date "${PDY} ${cyc} UTC - ${EXTRN_MDL_LBCS_OFFSET_HRS} hours" "+%Y%m%d%H" )
-yyyymmdd=${CDATE_MOD:0:8}
-mm="${CDATE_MOD:4:2}"
-hh="${CDATE_MOD:8:2}"
+CDATE_MOD=`$NDATE -${EXTRN_MDL_LBCS_OFFSET_HRS} ${PDY}${cyc}`
+YYYYMMDD="${CDATE_MOD:0:8}"
+MM="${CDATE_MOD:4:2}"
+HH="${CDATE_MOD:8:2}"
if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then
cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} ))
@@ -92,38 +92,40 @@ for i_lbc in $(seq ${LBC_SPEC_INTVL_HRS} ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS} )
LBC_SPEC_FCST_HRS+=("$i_lbc")
done
-if [ ${DO_AQM_CHEM_LBCS} = "TRUE" ]; then
-
- ext_lbcs_file=${AQM_LBCS_FILES}
-  chem_lbcs_fn=${ext_lbcs_file//<MM>/${mm}}
+# Copy lbcs files from DATA_SHARE
+aqm_lbcs_fn_prefix="${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f"
+for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do
+ fhr=$( printf "%03d" "${hr}" )
+ aqm_lbcs_fn="${aqm_lbcs_fn_prefix}${fhr}.nc"
+ cpreq "${DATA_SHARE}/${aqm_lbcs_fn}" ${DATA}
+done
- chem_lbcs_fp=${DCOMINchem_lbcs}/${chem_lbcs_fn}
+if [ "${DO_AQM_CHEM_LBCS}" = "TRUE" ]; then
+ ext_lbcs_file="${AQM_LBCS_FILES}"
+  chem_lbcs_fn=${ext_lbcs_file//<MM>/${MM}}
+ chem_lbcs_fp="${FIXaqm}/chemlbc/${chem_lbcs_fn}"
if [ -f ${chem_lbcs_fp} ]; then
#Copy the boundary condition file to the current location
- cp_vrfy ${chem_lbcs_fp} .
+ cpreq ${chem_lbcs_fp} .
else
message_txt="The chemical LBC files do not exist:
CHEM_BOUNDARY_CONDITION_FILE = \"${chem_lbcs_fp}\""
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do
fhr=$( printf "%03d" "${hr}" )
- if [ -r ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fhr}.nc ]; then
- ncks -A ${chem_lbcs_fn} ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fhr}.nc
+ aqm_lbcs_fn="${aqm_lbcs_fn_prefix}${fhr}.nc"
+ if [ -r "${aqm_lbcs_fn}" ]; then
+ ncks -A ${chem_lbcs_fn} ${aqm_lbcs_fn}
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to NCKS returned with nonzero exit code."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
+ cpreq ${aqm_lbcs_fn} "${aqm_lbcs_fn}_chemlbc"
fi
done
@@ -139,54 +141,49 @@ fi
#
#-----------------------------------------------------------------------
#
-if [ ${DO_AQM_GEFS_LBCS} = "TRUE" ]; then
-
- AQM_GEFS_FILE_CYC=${AQM_GEFS_FILE_CYC:-"${hh}"}
+if [ "${DO_AQM_GEFS_LBCS}" = "TRUE" ]; then
+ AQM_GEFS_FILE_CYC=${AQM_GEFS_FILE_CYC:-"${HH}"}
AQM_GEFS_FILE_CYC=$( printf "%02d" "${AQM_GEFS_FILE_CYC}" )
- GEFS_CYC_DIFF=$(( cyc - AQM_GEFS_FILE_CYC ))
- if [ "${GEFS_CYC_DIFF}" -lt "0" ]; then
- TSTEPDIFF=$( printf "%02d" $(( 24 + ${GEFS_CYC_DIFF} )) )
+ gefs_cyc_diff=$(( cyc - AQM_GEFS_FILE_CYC ))
+ if [ "${YYYYMMDD}" = "${PDY}" ]; then
+ tstepdiff=$( printf "%02d" ${gefs_cyc_diff} )
else
- TSTEPDIFF=$( printf "%02d" ${GEFS_CYC_DIFF} )
+ tstepdiff=$( printf "%02d" $(( 24 + ${gefs_cyc_diff} )) )
fi
- AQM_MOFILE_FN="${AQM_GEFS_FILE_PREFIX}.t${AQM_GEFS_FILE_CYC}z.atmf"
+ aqm_mofile_fn="${AQM_GEFS_FILE_PREFIX}.t${AQM_GEFS_FILE_CYC}z.atmf"
if [ "${DO_REAL_TIME}" = "TRUE" ]; then
- AQM_MOFILE_FP="${COMINgefs}/gefs.${yyyymmdd}/${AQM_GEFS_FILE_CYC}/chem/sfcsig/${AQM_MOFILE_FN}"
+ aqm_mofile_fp="${COMINgefs}/gefs.${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/chem/sfcsig/${aqm_mofile_fn}"
else
- AQM_MOFILE_FP="${DCOMINgefs}/${yyyymmdd}/${AQM_GEFS_FILE_CYC}/${AQM_MOFILE_FN}"
+ aqm_mofile_fp="${COMINgefs}/${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/${aqm_mofile_fn}"
fi
# Check if GEFS aerosol files exist
for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do
hr_mod=$(( hr + EXTRN_MDL_LBCS_OFFSET_HRS ))
fhr=$( printf "%03d" "${hr_mod}" )
- AQM_MOFILE_FHR_FP="${AQM_MOFILE_FP}${fhr}.nemsio"
- if [ ! -e "${AQM_MOFILE_FHR_FP}" ]; then
- message_txt="The GEFS file (AQM_MOFILE_FHR_FP) for LBCs of \"${cycle}\" does not exist:
- AQM_MOFILE_FHR_FP = \"${AQM_MOFILE_FHR_FP}\""
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- message_warning="WARNING: ${message_txt}"
- print_info_msg "${message_warning}"
- if [ ! -z "${maillist}" ]; then
- echo "${message_warning}" | mail.py $maillist
- fi
+ aqm_mofile_fhr_fp="${aqm_mofile_fp}${fhr}.nemsio"
+ if [ ! -e "${aqm_mofile_fhr_fp}" ]; then
+ message_txt="WARNING: The GEFS file (AQM_MOFILE_FHR_FP) for LBCs of \"${cycle}\" does not exist:
+ aqm_mofile_fhr_fp = \"${aqm_mofile_fhr_fp}\""
+ if [ ! -z "${MAILTO}" ] && [ "${MACHINE}" = "WCOSS2" ]; then
+ echo "${message_txt}" | mail.py $maillist
else
print_err_msg_exit "${message_txt}"
- fi
+ fi
fi
done
- NUMTS="$(( FCST_LEN_HRS / LBC_SPEC_INTVL_HRS + 1 ))"
+ numts="$(( FCST_LEN_HRS / LBC_SPEC_INTVL_HRS + 1 ))"
cat > gefs2lbc-nemsio.ini <>$pgmout 2>errfile
+ export err=$?; err_chk
print_info_msg "
========================================================================
Successfully added GEFS aerosol LBCs !!!
========================================================================"
-#
fi
+
+for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do
+ fhr=$( printf "%03d" "${hr}" )
+ aqm_lbcs_fn="${aqm_lbcs_fn_prefix}${fhr}.nc"
+ cpreq -p "${DATA}/${aqm_lbcs_fn}" ${COMOUT}
+done
#
print_info_msg "
========================================================================
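
CDATE_MOD is now computed with the prod_util NDATE helper instead of GNU date; both forms shift a YYYYMMDDHH stamp backward by the LBC offset. A small sketch showing the two equivalents (the NDATE path and module setup are assumed to be provided by the task environment):

    # Sketch: shift a YYYYMMDDHH cycle stamp back by EXTRN_MDL_LBCS_OFFSET_HRS hours.
    PDY="20240118"; cyc="12"; EXTRN_MDL_LBCS_OFFSET_HRS=6    # hypothetical values

    # prod_util form used in the updated script ($NDATE must point at the ndate utility):
    # CDATE_MOD=$( ${NDATE} -${EXTRN_MDL_LBCS_OFFSET_HRS} ${PDY}${cyc} )

    # GNU date equivalent (the pattern the removed lines used):
    CDATE_MOD=$( date --utc --date "${PDY} ${cyc} UTC - ${EXTRN_MDL_LBCS_OFFSET_HRS} hours" "+%Y%m%d%H" )
    echo "${CDATE_MOD}"    # 2024011806
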
diff --git a/scripts/exregional_bias_correction_o3.sh b/scripts/exsrw_bias_correction_o3.sh
similarity index 68%
rename from scripts/exregional_bias_correction_o3.sh
rename to scripts/exsrw_bias_correction_o3.sh
index 709cc1957d..1ef4012528 100755
--- a/scripts/exregional_bias_correction_o3.sh
+++ b/scripts/exsrw_bias_correction_o3.sh
@@ -7,7 +7,7 @@
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
+. ${USHsrw}/source_util_funcs.sh
source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
@@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
@@ -80,8 +80,8 @@ yyyymm_m1=${PDYm1:0:6}
yyyy_m2=${PDYm2:0:4}
yyyymm_m2=${PDYm2:0:6}
yyyy_m3=${PDYm3:0:4}
-yyyymm_m3=${PDYm3:0:6}
-
+yyyymm_m3=${PDYm3:0:6}
+
#
#-----------------------------------------------------------------------
#
@@ -103,13 +103,11 @@ fi
# STEP 1: Retrieve AIRNOW observation data
#-----------------------------------------------------------------------------
-mkdir_vrfy -p "${DATA}/data"
+mkdir -p "${DATA}/data"
-# Retrieve real-time airnow data for the last three days and convert them into netcdf.
-# In the following for-loop, pdym stands for previous (m) day of the present day (PDY)
-# in the NCO standards, i.e. PDYm1: 1day ago, PDYm2: 2days ago, PDYm3: 3days ago
-for i_pdym in {1..3}; do
- case $i_pdym in
+# Retrieve real-time airnow data for the last three days and convert them into netcdf
+ for ipdym in {1..3}; do
+ case $ipdym in
1)
cvt_yyyy="${yyyy_m1}"
cvt_yyyymm="${yyyymm_m1}"
@@ -134,22 +132,22 @@ for i_pdym in {1..3}; do
cvt_input_fp="${cvt_input_dir}/YYYY/YYYYMMDD/${cvt_input_fn}"
cvt_output_fp="${cvt_output_dir}/YYYY/YYYYMMDD/${cvt_output_fn}"
- mkdir_vrfy -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
- mkdir_vrfy -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}"
- cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
+ mkdir -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
+ mkdir -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}"
+
+ if [ "$(ls -A ${DCOMINairnow}/${cvt_pdy}/airnow)" ]; then
+ cp ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
+ else
+ message_warning="WARNING: airnow data missing. skip this date ${cvt_pdy}"
+ print_info_msg "${message_warning}"
+ fi
PREP_STEP
eval ${RUN_CMD_SERIAL} ${EXECdir}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR}
export err=$?
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
err_chk
- else
- if [ $err -ne 0 ]; then
- print_err_msg_exit "Call to executable to run CONVERT_AIRNOW_CSV returned with nonzero exit code."
- fi
- fi
POST_STEP
-done
+ done
#-----------------------------------------------------------------------------
# STEP 2: Extracting PM2.5, O3, and met variables from CMAQ input and outputs
@@ -158,7 +156,7 @@ done
FCST_LEN_HRS=$( printf "%03d" ${FCST_LEN_HRS} )
ic=1
while [ $ic -lt 120 ]; do
- if [ -s ${COMIN}/${NET}.${cycle}.chem_sfc.f${FCST_LEN_HRS}.nc ]; then
+ if [ -s ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.f${FCST_LEN_HRS}.nc ]; then
echo "cycle ${cyc} post1 is done!"
break
else
@@ -173,113 +171,88 @@ fi
# remove any pre-existing ${NET}.${cycle}.chem_sfc/met_sfc.nc for 2-stage post processing
DATA_grid="${DATA}/data/bcdata.${yyyymm}/grid"
if [ -d "${DATA_grid}/${cyc}z/${PDY}" ]; then
- rm_vrfy -rf "${DATA_grid}/${cyc}z/${PDY}"
+ rm -rf "${DATA_grid}/${cyc}z/${PDY}"
fi
-mkdir_vrfy -p "${DATA_grid}/${cyc}z/${PDY}"
-ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY}
-ln_vrfy -sf ${COMIN}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY}
+mkdir -p "${DATA_grid}/${cyc}z/${PDY}"
+ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY}
+ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY}
#-----------------------------------------------------------------------------
# STEP 3: Interpolating CMAQ O3 into AIRNow sites
#-----------------------------------------------------------------------------
-mkdir_vrfy -p ${DATA}/data/coords
-mkdir_vrfy -p ${DATA}/data/site-lists.interp
-mkdir_vrfy -p ${DATA}/out/ozone/${yyyy}
-mkdir_vrfy -p ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy}
+mkdir -p ${DATA}/data/coords
+mkdir -p ${DATA}/data/site-lists.interp
+mkdir -p ${DATA}/out/ozone/${yyyy}
+mkdir -p ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy}
-cp_vrfy ${PARMaqm_utils}/bias_correction/sites.valid.ozone.20230331.12z.list ${DATA}/data/site-lists.interp
-cp_vrfy ${PARMaqm_utils}/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords
-cp_vrfy ${PARMaqm_utils}/bias_correction/config.interp.ozone.7-vars_${id_domain}.${cyc}z ${DATA}
+cp ${PARMdir}/aqm_utils/bias_correction/sites.valid.ozone.20230331.12z.list ${DATA}/data/site-lists.interp
+cp ${PARMdir}/aqm_utils/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords
+cp ${PARMdir}/aqm_utils/bias_correction/config.interp.ozone.7-vars_${id_domain}.${cyc}z ${DATA}
PREP_STEP
eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.ozone.7-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR}
export err=$?
-if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
err_chk
-else
- if [ $err -ne 0 ]; then
- print_err_msg_exit "Call to executable to run AQM_BIAS_INTERPOLATE returned with nonzero exit code."
- fi
-fi
POST_STEP
-cp_vrfy ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy}
+cp ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy}
if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then
- mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy}
- cp_vrfy ${DATA}/out/ozone/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy}
-
- for i_pdym in {0..3}; do
- case $i_pdym in
- 0)
- cvt_yyyy="${yyyy}"
- cvt_yyyymm="${yyyymm}"
- cvt_pdy="${PDY}"
- ;;
- 1)
- cvt_yyyy="${yyyy_m1}"
- cvt_yyyymm="${yyyymm_m1}"
- cvt_pdy="${PDYm1}"
- ;;
- 2)
- cvt_yyyy="${yyyy_m2}"
- cvt_yyyymm="${yyyymm_m2}"
- cvt_pdy="${PDYm2}"
- ;;
- 3)
- cvt_yyyy="${yyyy_m3}"
- cvt_yyyymm="${yyyymm_m3}"
- cvt_pdy="${PDYm3}"
- ;;
- esac
- # CSV and NetCDF files
- mkdir_vrfy -p ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/csv/${cvt_yyyy}/${cvt_pdy}
- mkdir_vrfy -p ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy}
- if [ "${i_pdym}" != "0" ]; then
- cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/csv/${cvt_yyyy}/${cvt_pdy}
- cp_vrfy ${DATA}/data/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy}/HourlyAQObs.${cvt_pdy}.nc ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy}
- fi
- done
- mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY}
- cp_vrfy ${COMIN}/${NET}.${cycle}.*sfc*.nc ${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY}
+ mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy}
+ cp ${DATA}/out/ozone/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy}
+
+ # CSV files
+ mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/airnow/csv/${yyyy}/${PDY}
+ mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/csv/${yyyy_m1}/${PDYm1}
+ mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/csv/${yyyy_m2}/${PDYm2}
+ mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/csv/${yyyy_m3}/${PDYm3}
+ cp ${DCOMINairnow}/${PDYm1}/airnow/HourlyAQObs_${PDYm1}*.dat ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/csv/${yyyy_m1}/${PDYm1}
+ cp ${DCOMINairnow}/${PDYm2}/airnow/HourlyAQObs_${PDYm2}*.dat ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/csv/${yyyy_m2}/${PDYm2}
+ cp ${DCOMINairnow}/${PDYm3}/airnow/HourlyAQObs_${PDYm3}*.dat ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/csv/${yyyy_m3}/${PDYm3}
+
+ # NetCDF files
+ mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/airnow/netcdf/${yyyy}/${PDY}
+ mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/netcdf/${yyyy_m1}/${PDYm1}
+ mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/netcdf/${yyyy_m2}/${PDYm2}
+ mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/netcdf/${yyyy_m3}/${PDYm3}
+ cp ${DATA}/data/bcdata.${yyyymm_m1}/airnow/netcdf/${yyyy_m1}/${PDYm1}/HourlyAQObs.${PDYm1}.nc ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/netcdf/${yyyy_m1}/${PDYm1}
+ cp ${DATA}/data/bcdata.${yyyymm_m2}/airnow/netcdf/${yyyy_m2}/${PDYm2}/HourlyAQObs.${PDYm2}.nc ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/netcdf/${yyyy_m2}/${PDYm2}
+ cp ${DATA}/data/bcdata.${yyyymm_m3}/airnow/netcdf/${yyyy_m3}/${PDYm3}/HourlyAQObs.${PDYm3}.nc ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/netcdf/${yyyy_m3}/${PDYm3}
+
+ mkdir -p "${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY}"
+ cp ${COMIN}/${cyc}/${NET}.${cycle}.*_sfc.f*.nc ${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY}
fi
#-----------------------------------------------------------------------------
# STEP 4: Performing Bias Correction for Ozone
#-----------------------------------------------------------------------------
-rm_vrfy -rf ${DATA}/data/bcdata*
+rm -rf ${DATA}/data/bcdata*
-ln_vrfy -sf ${COMINbicor}/bcdata* "${DATA}/data"
+ln -sf ${COMINbicor}/bcdata* "${DATA}/data"
-mkdir_vrfy -p ${DATA}/data/sites
-cp_vrfy ${PARMaqm_utils}/bias_correction/config.ozone.bias_corr_${id_domain}.${cyc}z ${DATA}
+mkdir -p ${DATA}/data/sites
+cp ${PARMdir}/aqm_utils/bias_correction/config.ozone.bias_corr_${id_domain}.${cyc}z ${DATA}
PREP_STEP
eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.ozone.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR}
export err=$?
-if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
err_chk
-else
- if [ $err -ne 0 ]; then
- print_err_msg_exit "Call to executable to run AQM_BIAS_CORRECT returned with nonzero exit code."
- fi
-fi
POST_STEP
-cp_vrfy ${DATA}/out/ozone.corrected* ${COMIN}
+cp ${DATA}/out/ozone.corrected* ${COMOUT}
if [ "${cyc}" = "12" ]; then
- cp_vrfy ${DATA}/data/sites/sites.valid.ozone.${PDY}.${cyc}z.list ${DATA}
+ cp ${DATA}/data/sites/sites.valid.ozone.${PDY}.${cyc}z.list ${DATA}
fi
#-----------------------------------------------------------------------------
# STEP 5: converting netcdf to grib format
#-----------------------------------------------------------------------------
-ln_vrfy -sf ${COMIN}/ozone.corrected.${PDY}.${cyc}z.nc .
+ln -sf ${COMIN}/${cyc}/ozone.corrected.${PDY}.${cyc}z.nc .
#
cat >bias_cor.ini < filesize
export XLFRTEOPTS="unit_vars=yes"
@@ -408,11 +369,11 @@ EOF1
export FORT12="filesize"
export FORT31=
export FORT51=awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.227
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.227
done
# Post Files to COMOUTwmo
- cp_vrfy awpaqm.${cycle}.*o3-max-bc.227.grib2 ${COMOUTwmo}
+ cp awpaqm.${cycle}.*o3-max-bc.227.grib2 ${COMOUTwmo}
# Distribute Data
if [ "${SENDDBN_NTC}" = "TRUE" ] ; then
@@ -423,13 +384,13 @@ EOF1
fi
#-------------------------------------
-rm_vrfy -rf tmpfile
+rm -rf tmpfile
fhr=01
while [ "${fhr}" -le "${FCST_LEN_HRS}" ]; do
fhr3d=$( printf "%03d" "${fhr}" )
- cp_vrfy ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 ${COMOUT}
+ cp ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 ${COMOUT}
# create GRIB file to convert to grid 227 then to GRIB2 for NDFD
cat ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 >> tmpfile
@@ -453,13 +414,13 @@ newgrib2file2=${NET}.${cycle}.ave_8hr_o3_bc.227.grib2
grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000"
wgrib2 tmpfile.1hr -set_grib_type c3b -new_grid_winds earth -new_grid ${grid227} ${newgrib2file1}
-cp_vrfy tmpfile.1hr ${COMOUT}/${NET}.${cycle}.ave_1hr_o3_bc.${id_domain}.grib2
-cp_vrfy ${NET}.${cycle}.ave_1hr_o3_bc.227.grib2 ${COMOUT}
+cp tmpfile.1hr ${COMOUT}/${NET}.${cycle}.ave_1hr_o3_bc.${id_domain}.grib2
+cp ${NET}.${cycle}.ave_1hr_o3_bc.227.grib2 ${COMOUT}
if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
wgrib2 tmpfile.8hr -set_grib_type c3b -new_grid_winds earth -new_grid ${grid227} ${newgrib2file2}
- cp_vrfy tmpfile.8hr ${COMOUT}/${NET}.${cycle}.ave_8hr_o3_bc.${id_domain}.grib2
- cp_vrfy ${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 ${COMOUT}
+ cp tmpfile.8hr ${COMOUT}/${NET}.${cycle}.ave_8hr_o3_bc.${id_domain}.grib2
+ cp ${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 ${COMOUT}
fi
if [ "${SENDDBN}" = "TRUE" ] ; then
@@ -482,7 +443,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
export FORT12="filesize"
export FORT31=
export FORT51=grib2.${cycle}.awpcsozcon_aqm_${hr}-bc.temp
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227
echo `ls -l grib2.${cycle}.awpcsozcon_aqm_${hr}-bc.temp | awk '{print $5} '` > filesize
export XLFRTEOPTS="unit_vars=yes"
@@ -490,7 +451,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
export FORT12="filesize"
export FORT31=
export FORT51=awpaqm.${cycle}.${hr}ho3-bc.227.grib2
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227
# Create AWIPS GRIB data for daily 1-hr and 8-hr max ozone
echo 0 > filesize
@@ -499,7 +460,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
export FORT12="filesize"
export FORT31=
export FORT51=${NET}.${cycle}.max_${hr}hr_o3-bc.227.grib2.temp
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227
echo `ls -l ${NET}.${cycle}.max_${hr}hr_o3-bc.227.grib2.temp | awk '{print $5} '` > filesize
export XLFRTEOPTS="unit_vars=yes"
@@ -507,11 +468,11 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
export FORT12="filesize"
export FORT31=
export FORT51=awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227
# Post Files to COMOUTwmo
- cp_vrfy awpaqm.${cycle}.${hr}ho3-bc.227.grib2 ${COMOUTwmo}
- cp_vrfy awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 ${COMOUTwmo}
+ cp awpaqm.${cycle}.${hr}ho3-bc.227.grib2 ${COMOUTwmo}
+ cp awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 ${COMOUTwmo}
# Distribute Data
if [ "${SENDDBN}" = "TRUE" ]; then
@@ -520,7 +481,6 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
fi
done
fi
-
#
#-----------------------------------------------------------------------
#
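
The grid-227 products above are produced with the usual wgrib2 regridding pattern: -set_grib_type selects the packing, -new_grid_winds earth makes the winds earth-relative, and -new_grid takes the Lambert-conformal grid definition given in the script. A sketch with hypothetical input/output names:

    # Sketch only (hypothetical file names); wgrib2 must be in PATH.
    # ${grid227} is intentionally unquoted so its space-separated projection,
    # x-axis, and y-axis specs are passed as separate arguments to -new_grid.
    grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000"
    wgrib2 ozone_native_grid.grib2 \
      -set_grib_type c3b \
      -new_grid_winds earth \
      -new_grid ${grid227} ozone_grid227.grib2
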
diff --git a/scripts/exregional_bias_correction_pm25.sh b/scripts/exsrw_bias_correction_pm25.sh
similarity index 68%
rename from scripts/exregional_bias_correction_pm25.sh
rename to scripts/exsrw_bias_correction_pm25.sh
index 9503f744c9..ae1a2d6f65 100755
--- a/scripts/exregional_bias_correction_pm25.sh
+++ b/scripts/exsrw_bias_correction_pm25.sh
@@ -7,7 +7,7 @@
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
+. ${USHsrw}/source_util_funcs.sh
source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
@@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEF
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
@@ -103,13 +103,11 @@ fi
# STEP 1: Retrieve AIRNOW observation data
#-----------------------------------------------------------------------------
-mkdir_vrfy -p "${DATA}/data"
+mkdir -p "${DATA}/data"
-# Retrieve real-time airnow data for the last three days.
-# In the following for-loop, pdym stands for previous (m) day of the present day (PDY)
-# in the NCO standards, i.e. PDYm1: 1day ago, PDYm2: 2days ago, PDYm3: 3days ago
-for i_pdym in {1..3}; do
- case $i_pdym in
+# Retrieve real-time airnow data for the last three days
+ for ipdym in {1..3}; do
+ case $ipdym in
1)
cvt_yyyy="${yyyy_m1}"
cvt_yyyymm="${yyyymm_m1}"
@@ -134,22 +132,21 @@ for i_pdym in {1..3}; do
cvt_input_fp="${cvt_input_dir}/YYYY/YYYYMMDD/${cvt_input_fn}"
cvt_output_fp="${cvt_output_dir}/YYYY/YYYYMMDD/${cvt_output_fn}"
- mkdir_vrfy -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
- mkdir_vrfy -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}"
- cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
-
+ mkdir -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
+ mkdir -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}"
+ if [ "$(ls -A ${DCOMINairnow}/${cvt_pdy}/airnow)" ]; then
+ cp ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
+ else
+ message_warning="WARNING: airnow data missing. skip this date ${cvt_pdy}"
+ print_info_msg "${message_warning}"
+ fi
+
PREP_STEP
eval ${RUN_CMD_SERIAL} ${EXECdir}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR}
export err=$?
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
err_chk
- else
- if [ $err -ne 0 ]; then
- print_err_msg_exit "Call to executable to run CONVERT_AIRNOW_CSV returned with nonzero exit code."
- fi
- fi
POST_STEP
-done
+ done
#-----------------------------------------------------------------------------
# STEP 2: Extracting PM2.5, O3, and met variables from CMAQ input and outputs
@@ -158,7 +155,7 @@ done
FCST_LEN_HRS=$( printf "%03d" ${FCST_LEN_HRS} )
ic=1
while [ $ic -lt 120 ]; do
- if [ -s ${COMIN}/${NET}.${cycle}.chem_sfc.f${FCST_LEN_HRS}.nc ]; then
+ if [ -s ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.f${FCST_LEN_HRS}.nc ]; then
echo "cycle ${cyc} post1 is done!"
break
else
@@ -173,82 +170,70 @@ fi
# remove any pre-existing ${NET}.${cycle}.chem_sfc/met_sfc.nc for 2-stage post processing
DATA_grid="${DATA}/data/bcdata.${yyyymm}/grid"
if [ -d "${DATA_grid}/${cyc}z/${PDY}" ]; then
- rm_vrfy -rf "${DATA_grid}/${cyc}z/${PDY}"
+ rm -rf "${DATA_grid}/${cyc}z/${PDY}"
fi
-mkdir_vrfy -p "${DATA_grid}/${cyc}z/${PDY}"
-ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY}
-ln_vrfy -sf ${COMIN}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY}
+mkdir -p "${DATA_grid}/${cyc}z/${PDY}"
+ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY}
+ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY}
#-----------------------------------------------------------------------
# STEP 3: Interpolating CMAQ PM2.5 into AIRNow sites
#-----------------------------------------------------------------------
-mkdir_vrfy -p ${DATA}/data/coords
-mkdir_vrfy -p ${DATA}/data/site-lists.interp
-mkdir_vrfy -p ${DATA}/out/pm25/${yyyy}
-mkdir_vrfy -p ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy}
+mkdir -p ${DATA}/data/coords
+mkdir -p ${DATA}/data/site-lists.interp
+mkdir -p ${DATA}/out/pm25/${yyyy}
+mkdir -p ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy}
-cp_vrfy ${PARMaqm_utils}/bias_correction/sites.valid.pm25.20230331.12z.list ${DATA}/data/site-lists.interp
-cp_vrfy ${PARMaqm_utils}/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords
-cp_vrfy ${PARMaqm_utils}/bias_correction/config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${DATA}
+cp ${PARMdir}/aqm_utils/bias_correction/sites.valid.pm25.20230331.12z.list ${DATA}/data/site-lists.interp
+cp ${PARMdir}/aqm_utils/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords
+cp ${PARMdir}/aqm_utils/bias_correction/config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${DATA}
PREP_STEP
eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR}
export err=$?
-if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
err_chk
-else
- if [ $err -ne 0 ]; then
- print_err_msg_exit "Call to executable to run CONVERT_AIRNOW_CSV returned with nonzero exit code."
- fi
-fi
POST_STEP
-cp_vrfy ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy}
+cp ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy}
if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then
- mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy}
- cp_vrfy ${DATA}/out/pm25/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy}
+mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy}
+cp ${DATA}/out/pm25/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy}
fi
#-----------------------------------------------------------------------
# STEP 4: Performing Bias Correction for PM2.5
#-----------------------------------------------------------------------
-rm_vrfy -rf ${DATA}/data/bcdata*
+rm -rf ${DATA}/data/bcdata*
-ln_vrfy -sf ${COMINbicor}/bcdata* "${DATA}/data"
+ln -sf ${COMINbicor}/bcdata* "${DATA}/data"
-mkdir_vrfy -p ${DATA}/data/sites
+mkdir -p ${DATA}/data/sites
-cp_vrfy ${PARMaqm_utils}/bias_correction/config.pm2.5.bias_corr_${id_domain}.${cyc}z ${DATA}
-cp_vrfy ${PARMaqm_utils}/bias_correction/site_blocking.pm2.5.2021.0427.2-sites.txt ${DATA}
-cp_vrfy ${PARMaqm_utils}/bias_correction/bias_thresholds.pm2.5.2015.1030.32-sites.txt ${DATA}
+cp ${PARMdir}/aqm_utils/bias_correction/config.pm2.5.bias_corr_${id_domain}.${cyc}z ${DATA}
+cp ${PARMdir}/aqm_utils/bias_correction/site_blocking.pm2.5.2021.0427.2-sites.txt ${DATA}
+cp ${PARMdir}/aqm_utils/bias_correction/bias_thresholds.pm2.5.2015.1030.32-sites.txt ${DATA}
PREP_STEP
eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.pm2.5.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR}
export err=$?
-if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
err_chk
-else
- if [ $err -ne 0 ]; then
- print_err_msg_exit "Call to executable to run AQM_BIAS_CORRECT returned with nonzero exit code."
- fi
-fi
POST_STEP
-cp_vrfy $DATA/out/pm2.5.corrected* ${COMIN}
+cp $DATA/out/pm2.5.corrected* ${COMOUT}
if [ "${cyc}" = "12" ]; then
- cp_vrfy ${DATA}/data/sites/sites.valid.pm25.${PDY}.${cyc}z.list ${DATA}
+ cp ${DATA}/data/sites/sites.valid.pm25.${PDY}.${cyc}z.list ${DATA}
fi
#------------------------------------------------------------------------
# STEP 5: converting netcdf to grib format
#------------------------------------------------------------------------
-ln_vrfy -sf ${COMIN}/pm2.5.corrected.${PDY}.${cyc}z.nc .
+ln -sf ${COMIN}/${cyc}/pm2.5.corrected.${PDY}.${cyc}z.nc .
# convert from netcdf to grib2 format
cat >bias_cor.ini < filesize
export XLFRTEOPTS="unit_vars=yes"
@@ -412,17 +382,17 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
export FORT12="filesize"
export FORT31=
export FORT51=awpaqm.${cycle}.1hpm25-bc.227.grib2
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_pm25_bc.${cycle}.227
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_pm25_bc.${cycle}.227
####################################################
- rm_vrfy -f filesize
+ rm -f filesize
echo 0 > filesize
export XLFRTEOPTS="unit_vars=yes"
export FORT11=${NET}.${cycle}.max_1hr_pm25_bc.227.grib2
export FORT12="filesize"
export FORT31=
export FORT51=${NET}.${cycle}.max_1hr_pm25_bc.227.grib2.temp
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227
echo `ls -l ${NET}.${cycle}.max_1hr_pm25_bc.227.grib2.temp | awk '{print $5} '` > filesize
export XLFRTEOPTS="unit_vars=yes"
@@ -430,9 +400,9 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
export FORT12="filesize"
export FORT31=
export FORT51=awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227
- rm_vrfy -f filesize
+ rm -f filesize
# daily_24hr_ave_PM2.5
echo 0 > filesize
export XLFRTEOPTS="unit_vars=yes"
@@ -440,7 +410,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
export FORT12="filesize"
export FORT31=
export FORT51=${NET}.${cycle}.ave_24hr_pm25_bc.227.grib2.temp
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227
echo `ls -l ${NET}.${cycle}.ave_24hr_pm25_bc.227.grib2.temp | awk '{print $5} '` > filesize
export XLFRTEOPTS="unit_vars=yes"
@@ -448,21 +418,20 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
export FORT12="filesize"
export FORT31=
export FORT51=awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227
# Post Files to COMOUTwmo
- cp_vrfy awpaqm.${cycle}.1hpm25-bc.227.grib2 ${COMOUTwmo}
- cp_vrfy awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${COMOUTwmo}
- cp_vrfy awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 ${COMOUTwmo}
+ cp awpaqm.${cycle}.1hpm25-bc.227.grib2 ${COMOUTwmo}
+ cp awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${COMOUTwmo}
+ cp awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 ${COMOUTwmo}
# Distribute Data
if [ "${SENDDBN_NTC}" = "TRUE" ] ; then
- ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.1hpm25-bc.227.grib2
- ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2
- ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2
+ ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25-bc.227.grib2
+ ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2
+ ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2
fi
fi
-
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exsrw_fire_emission.sh b/scripts/exsrw_fire_emission.sh
new file mode 100755
index 0000000000..68178016e7
--- /dev/null
+++ b/scripts/exsrw_fire_emission.sh
@@ -0,0 +1,167 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP}
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the ex-script for the task that fetches fire emission data files
+from disk or generates a model-ready RAVE emission file from raw data files.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Set up variables for call to retrieve_data.py
+#
+#-----------------------------------------------------------------------
+#
+YYYYMMDD=${FIRE_FILE_CDATE:0:8}
+HH=${FIRE_FILE_CDATE:8:2}
+
+CDATE_mh1=`$NDATE -1 ${YYYYMMDD}${HH}`
+yyyymmdd_mh1=${CDATE_mh1:0:8}
+hh_mh1=${CDATE_mh1:8:2}
+#
+#-----------------------------------------------------------------------
+#
+# Retrieve fire file to FIRE_EMISSION_STAGING_DIR
+#
+#-----------------------------------------------------------------------
+#
+aqm_fire_file_fn="${AQM_FIRE_FILE_PREFIX}_${YYYYMMDD}_t${HH}z${AQM_FIRE_FILE_SUFFIX}"
+
+# Check if the fire file exists in the designated directory
+if [ -e "${COMINfire}/${aqm_fire_file_fn}" ]; then
+ cpreq "${COMINfire}/${aqm_fire_file_fn}" ${COMOUT}
+else
+ # Copy raw data
+ for ihr in {0..23}; do
+ download_time=`$NDATE -$ihr ${yyyymmdd_mh1}${hh_mh1}`
+ FILE_curr="Hourly_Emissions_13km_${download_time}00_${download_time}00.nc"
+ FILE_13km="RAVE-HrlyEmiss-13km_v*_blend_s${download_time}00000_e${download_time}59590_c*.nc"
+ yyyymmdd_dn="${download_time:0:8}"
+ hh_dn="${download_time:8:2}"
+ missing_download_time=`$NDATE -24 ${yyyymmdd_dn}${hh_dn}`
+ yyyymmdd_dn_md1="${missing_download_time:0:8}"
+ FILE_13km_md1="RAVE-HrlyEmiss-13km_v*_blend_s${missing_download_time}00000_e${missing_download_time}59590_c*.nc"
+ if [ -s `ls ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}` ] && [ $(stat -c %s `ls ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}`) -gt 4000000 ]; then
+ cpreq -p ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km} ${FILE_curr}
+ elif [ -s `ls ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}` ] && [ $(stat -c %s `ls ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}`) -gt 4000000 ]; then
+ echo "WARNING: ${FILE_13km} does not exist or broken. Replacing with the file of previous date ..."
+ cpreq -p ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1} ${FILE_curr}
+ else
+ message_txt="WARNING Fire Emission RAW data does not exist or broken:
+ FILE_13km_md1 = \"${FILE_13km_md1}\"
+ DCOMINfire = \"${DCOMINfire}\""
+
+ cpreq -p ${FIXaqm}/fire/Hourly_Emissions_13km_dummy.nc ${FILE_curr}
+ print_info_msg "WARNING: ${message_txt}. Replacing with the dummy file :: AQM RUN SOFT FAILED."
+ fi
+ done
+
+ ncks -O -h --mk_rec_dmn time Hourly_Emissions_13km_${download_time}00_${download_time}00.nc temp.nc
+ export err=$?
+ if [ $err -ne 0 ]; then
+ message_txt="Call to NCKS returned with nonzero exit code."
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
+
+ mv temp.nc Hourly_Emissions_13km_${download_time}00_${download_time}00.nc
+
+ ncrcat -h Hourly_Emissions_13km_*.nc Hourly_Emissions_13km_${YYYYMMDD}0000_${YYYYMMDD}2300.t${HH}z.nc
+ export err=$?
+ if [ $err -ne 0 ]; then
+ message_txt="Call to NCRCAT returned with nonzero exit code."
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
+
+ input_fire="${DATA}/Hourly_Emissions_13km_${YYYYMMDD}0000_${YYYYMMDD}2300.t${HH}z.nc"
+ output_fire="${DATA}/Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_new24.t${HH}z.nc"
+
+ ${USHsrw}/aqm_utils_python/RAVE_remake.allspecies.aqmna13km.g793.py --date "${YYYYMMDD}" --cyc "${HH}" --input_fire "${input_fire}" --output_fire "${output_fire}"
+ export err=$?
+ if [ $err -ne 0 ]; then
+ message_txt="Call to python script \"RAVE_remake.allspecies.py\" returned with nonzero exit code."
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
+
+ ncks --mk_rec_dmn Time Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_new24.t${HH}z.nc -o Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc
+ export err=$?
+ if [ $err -ne 0 ]; then
+ message_txt="Call to NCKS returned with nonzero exit code."
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
+
+ cpreq Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_1.nc
+ cpreq Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_2.nc
+
+ ncrcat -O -D 2 Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_1.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_2.nc ${aqm_fire_file_fn}
+ export err=$?
+ if [ $err -ne 0 ]; then
+ message_txt="Call to NCRCAT returned with nonzero exit code."
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
+ fi
+
+ mv ${aqm_fire_file_fn} temp.nc
+ ncrename -v PM2.5,PM25 temp.nc temp1.nc
+ ncap2 -s 'where(Latitude > 30 && Latitude <=49 && land_cover == 1 ) PM25 = PM25 * 0.44444' temp1.nc temp2.nc
+ ncap2 -s 'where(Latitude <=30 && land_cover == 1 ) PM25 = PM25 * 0.8' temp2.nc temp3.nc
+ ncap2 -s 'where(Latitude <=49 && land_cover == 3 ) PM25 = PM25 * 1.11111' temp3.nc temp4.nc
+ ncap2 -s 'where(Latitude <=49 && land_cover == 4 ) PM25 = PM25 * 1.11111' temp4.nc temp5.nc
+ ncrename -v PM25,PM2.5 temp5.nc temp6.nc
+ mv temp6.nc ${aqm_fire_file_fn}
+
+ # Copy the final fire emission file to data share directory
+ cpreq "${DATA}/${aqm_fire_file_fn}" ${COMOUT}
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
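The chained ncrename/ncap2/ncrename steps above rename PM2.5 to PM25, apply piecewise scale factors by latitude band and land_cover, and rename back, presumably because ncap2 parses variable names as identifiers and the dot in PM2.5 gets in the way. A minimal sketch of the same adjustment in a single ncap2 pass (illustration only, not part of the patch; the temp_* file names are hypothetical, and it assumes ncap2's semicolon-separated multi-statement -s syntax):

    ncrename -v PM2.5,PM25 ${aqm_fire_file_fn} temp_renamed.nc
    # apply all three piecewise scale factors in one pass over the renamed variable
    ncap2 -O -s 'where(Latitude > 30 && Latitude <= 49 && land_cover == 1) PM25 = PM25 * 0.44444;
                 where(Latitude <= 30 && land_cover == 1) PM25 = PM25 * 0.8;
                 where(Latitude <= 49 && (land_cover == 3 || land_cover == 4)) PM25 = PM25 * 1.11111;' \
          temp_renamed.nc temp_scaled.nc
    ncrename -v PM25,PM2.5 temp_scaled.nc ${aqm_fire_file_fn}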
diff --git a/scripts/exregional_nexus_emission.sh b/scripts/exsrw_nexus_emission.sh
similarity index 63%
rename from scripts/exregional_nexus_emission.sh
rename to scripts/exsrw_nexus_emission.sh
index d1153d95b7..7edd18ce42 100755
--- a/scripts/exregional_nexus_emission.sh
+++ b/scripts/exsrw_nexus_emission.sh
@@ -7,7 +7,7 @@
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
+. ${USHsrw}/source_util_funcs.sh
source_config_for_task "cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
@@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
@@ -42,7 +42,7 @@ print_info_msg "
Entering script: \"${scrfunc_fn}\"
In directory: \"${scrfunc_dir}\"
-This is the ex-script for the task that runs NEXUS.
+This is the ex-script for the task that runs NEXUS EMISSION.
========================================================================"
#
#-----------------------------------------------------------------------
@@ -75,13 +75,12 @@ fi
#
#-----------------------------------------------------------------------
#
-# Move to the NEXUS working directory
+# Create NEXUS input directory in working directory
#
#-----------------------------------------------------------------------
#
DATAinput="${DATA}/input"
-mkdir_vrfy -p "$DATAinput"
-
+mkdir -p "$DATAinput"
#
#-----------------------------------------------------------------------
#
@@ -90,19 +89,13 @@ mkdir_vrfy -p "$DATAinput"
#-----------------------------------------------------------------------
#
USE_GFS_SFC="FALSE"
-if [ "${RUN_ENVIR}" = "nco" ]; then
- GFS_SFC_INPUT="${DATAROOT}/nexus_gfs_sfc.${share_pid}"
-else
- GFS_SFC_INPUT="${COMIN}/GFS_SFC"
-fi
-
+GFS_SFC_INPUT="${DATA_SHARE}"
if [ -d "${GFS_SFC_INPUT}" ]; then
- if [ "$(ls -A ${GFS_SFC_INPUT})" ]; then
+ if [ "$(ls -A ${GFS_SFC_INPUT}/gfs*.nc)" ]; then
ln -sf "${GFS_SFC_INPUT}" "GFS_SFC"
USE_GFS_SFC="TRUE"
fi
fi
-
#
#-----------------------------------------------------------------------
#
@@ -110,14 +103,12 @@ fi
#
#-----------------------------------------------------------------------
#
-cp_vrfy ${EXECdir}/nexus ${DATA}
-cp_vrfy ${NEXUS_FIX_DIR}/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc
-
+cpreq ${FIXaqm}/nexus/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc
if [ "${USE_GFS_SFC}" = "TRUE" ]; then
- cp_vrfy ${ARL_NEXUS_DIR}/config/cmaq_gfs_megan/*.rc ${DATA}
+ cpreq ${PARMsrw}/nexus_config/cmaq_gfs_megan/*.rc ${DATA}
else
- cp_vrfy ${ARL_NEXUS_DIR}/config/cmaq/*.rc ${DATA}
+ cpreq ${PARMsrw}/nexus_config/cmaq/*.rc ${DATA}
fi
#
#-----------------------------------------------------------------------
@@ -127,10 +118,10 @@ fi
#
#-----------------------------------------------------------------------
#
-mm="${PDY:4:2}"
-dd="${PDY:6:2}"
-hh="${cyc}"
-yyyymmdd="${PDY}"
+MM="${PDY:4:2}"
+DD="${PDY:6:2}"
+HH="${cyc}"
+YYYYMMDD="${PDY}"
NUM_SPLIT_NEXUS=$( printf "%02d" ${NUM_SPLIT_NEXUS} )
@@ -141,28 +132,33 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then
fi
if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then
- start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC" "+%Y%m%d%H" )
- end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H" )
+ start_date="${YYYYMMDD}${HH}"
+ end_date=`$NDATE +${FCST_LEN_HRS} ${YYYYMMDD}${HH}`
else
len_per_split=$(( FCST_LEN_HRS / NUM_SPLIT_NEXUS ))
nsptp=$(( nspt+1 ))
# Compute start and end dates for nexus split option
start_del_hr=$(( len_per_split * nspt ))
- start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${start_del_hr} hours " "+%Y%m%d%H" )
+ start_date=`$NDATE +${start_del_hr} ${YYYYMMDD}${HH}`
if [ "${nsptp}" = "${NUM_SPLIT_NEXUS}" ];then
- end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + $(expr $FCST_LEN_HRS + 1) hours" "+%Y%m%d%H" )
+ end_date=`$NDATE +$(expr $FCST_LEN_HRS + 1) ${YYYYMMDD}${HH}`
else
end_del_hr=$(( len_per_split * nsptp ))
- end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + $(expr $end_del_hr + 1) hours" "+%Y%m%d%H" )
+ end_del_hr1=$(( $end_del_hr + 1 ))
+ end_date=`$NDATE +${end_del_hr1} ${YYYYMMDD}${HH}`
fi
fi
#
-#######################################################################
+#----------------------------------------------------------------------
+#
# This will be the section to set the datasets used in $workdir/NEXUS_Config.rc
# All Datasets in that file need to be placed here as it will link the files
# necessary to that folder. In the future this will be done by a get_nexus_input
# script
+#
+#----------------------------------------------------------------------
+#
NEI2016="TRUE"
TIMEZONES="TRUE"
CEDS="TRUE"
@@ -173,148 +169,138 @@ NOAAGMD="TRUE"
SOA="TRUE"
EDGAR="TRUE"
MEGAN="TRUE"
-MODIS_XLAI="TRUE"
+MODIS_XLAI="FALSE"
OLSON_MAP="TRUE"
Yuan_XLAI="TRUE"
GEOS="TRUE"
AnnualScalar="TRUE"
-
-NEXUS_INPUT_BASE_DIR=${NEXUS_INPUT_DIR}
-########################################################################
-
+OFFLINE_SOILNOX="TRUE"
#
#----------------------------------------------------------------------
#
# modify time configuration file
#
-python3 ${ARL_NEXUS_DIR}/utils/python/nexus_time_parser.py -f ${DATA}/HEMCO_sa_Time.rc -s $start_date -e $end_date
+#----------------------------------------------------------------------
+#
+${USHsrw}/nexus_utils/python/nexus_time_parser.py -f ${DATA}/HEMCO_sa_Time.rc -s $start_date -e $end_date
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to python script \"nexus_time_parser.py\" failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
-
#
#---------------------------------------------------------------------
#
# set the root directory to the temporary directory
#
-python3 ${ARL_NEXUS_DIR}/utils/python/nexus_root_parser.py -f ${DATA}/NEXUS_Config.rc -d ${DATAinput}
+#----------------------------------------------------------------------
+#
+${USHsrw}/nexus_utils/python/nexus_root_parser.py -f ${DATA}/NEXUS_Config.rc -d ${DATAinput}
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to python script \"nexus_root_parser.py\" failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
-
#
#----------------------------------------------------------------------
+#
# Get all the files needed (TEMPORARILY JUST COPY FROM THE DIRECTORY)
#
+#----------------------------------------------------------------------
+#
if [ "${NEI2016}" = "TRUE" ]; then #NEI2016
- mkdir_vrfy -p ${DATAinput}/NEI2016v1
- mkdir_vrfy -p ${DATAinput}/NEI2016v1/v2022-07
- mkdir_vrfy -p ${DATAinput}/NEI2016v1/v2022-07/${mm}
- python3 ${ARL_NEXUS_DIR}/utils/python/nexus_nei2016_linker.py --src_dir ${NEXUS_INPUT_BASE_DIR} --date ${yyyymmdd} --work_dir ${DATAinput} -v "v2022-07"
+ mkdir -p ${DATAinput}/NEI2016v1
+ mkdir -p ${DATAinput}/NEI2016v1/v2022-07
+ mkdir -p ${DATAinput}/NEI2016v1/v2022-07/${MM}
+ ${USHsrw}/nexus_utils/python/nexus_nei2016_linker.py --src_dir ${FIXemis} --date ${YYYYMMDD} --work_dir ${DATAinput} -v "v2022-07"
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to python script \"nexus_nei2016_linker.py\" failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
- python3 ${ARL_NEXUS_DIR}/utils/python/nexus_nei2016_control_tilefix.py -f ${DATA}/NEXUS_Config.rc -t ${DATA}/HEMCO_sa_Time.rc # -d ${yyyymmdd}
+ ${USHsrw}/nexus_utils/python/nexus_nei2016_control_tilefix.py -f ${DATA}/NEXUS_Config.rc -t ${DATA}/HEMCO_sa_Time.rc # -d ${yyyymmdd}
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to python script \"nexus_nei2016_control_tilefix.py\" failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
fi
if [ "${TIMEZONES}" = "TRUE" ]; then # TIME ZONES
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/TIMEZONES ${DATAinput}/
+ ln -sf ${FIXemis}/TIMEZONES ${DATAinput}
fi
if [ "${MASKS}" = "TRUE" ]; then # MASKS
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/MASKS ${DATAinput}/
+ ln -sf ${FIXemis}/MASKS ${DATAinput}
fi
if [ "${CEDS}" = "TRUE" ]; then #CEDS
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/CEDS ${DATAinput}/
+ ln -sf ${FIXemis}/CEDS ${DATAinput}
fi
if [ "${HTAP2010}" = "TRUE" ]; then #CEDS2014
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/HTAP ${DATAinput}/
+ ln -sf ${FIXemis}/HTAP ${DATAinput}
fi
if [ "${OMIHTAP}" = "TRUE" ]; then #CEDS2014
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/OMI-HTAP_2019 ${DATAinput}/
+ ln -sf ${FIXemis}/OMI-HTAP_2019 ${DATAinput}
fi
if [ "${NOAAGMD}" = "TRUE" ]; then #NOAA_GMD
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/NOAA_GMD ${DATAinput}/
+ ln -sf ${FIXemis}/NOAA_GMD ${DATAinput}
fi
if [ "${SOA}" = "TRUE" ]; then #SOA
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/SOA ${DATAinput}/
+ ln -sf ${FIXemis}/SOA ${DATAinput}
fi
if [ "${EDGAR}" = "TRUE" ]; then #EDGARv42
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/EDGARv42 ${DATAinput}/
+ ln -sf ${FIXemis}/EDGARv42 ${DATAinput}
fi
if [ "${MEGAN}" = "TRUE" ]; then #MEGAN
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/MEGAN ${DATAinput}/
+ ln -sf ${FIXemis}/MEGAN ${DATAinput}
fi
if [ "${OLSON_MAP}" = "TRUE" ]; then #OLSON_MAP
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/OLSON_MAP ${DATAinput}/
+ ln -sf ${FIXemis}/OLSON_MAP ${DATAinput}
fi
if [ "${Yuan_XLAI}" = "TRUE" ]; then #Yuan_XLAI
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/Yuan_XLAI ${DATAinput}/
+ ln -sf ${FIXemis}/Yuan_XLAI ${DATAinput}
fi
if [ "${GEOS}" = "TRUE" ]; then #GEOS
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/GEOS_0.5x0.625 ${DATAinput}/
+ ln -sf ${FIXemis}/GEOS_0.5x0.625 ${DATAinput}
fi
if [ "${AnnualScalar}" = "TRUE" ]; then #ANNUAL_SCALAR
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/AnnualScalar ${DATAinput}/
+ ln -sf ${FIXemis}/AnnualScalar ${DATAinput}
fi
if [ "${MODIS_XLAI}" = "TRUE" ]; then #MODIS_XLAI
- ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/MODIS_XLAI ${DATAinput}/
+ ln -sf ${FIXemis}/MODIS_XLAI ${DATAinput}
+fi
+
+if [ "${OFFLINE_SOILNOX}" = "TRUE" ]; then #OFFLINE_SOILNOX
+ ln -sf ${FIXemis}/OFFLINE_SOILNOX ${DATAinput}
fi
if [ "${USE_GFS_SFC}" = "TRUE" ]; then # GFS INPUT
- mkdir_vrfy -p ${DATAinput}/GFS_SFC
- python3 ${ARL_NEXUS_DIR}/utils/python/nexus_gfs_bio.py -i ${DATA}/GFS_SFC/gfs.t??z.sfcf???.nc -o ${DATA}/GFS_SFC_MEGAN_INPUT.nc
+ mkdir -p ${DATAinput}/GFS_SFC
+ ${USHsrw}/nexus_utils/python/nexus_gfs_bio.py -i ${DATA}/GFS_SFC/gfs.t??z.sfcf???.nc -o ${DATA}/GFS_SFC_MEGAN_INPUT.nc
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to python script \"nexus_gfs_bio.py\" failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
fi
-
#
#----------------------------------------------------------------------
#
@@ -322,18 +308,14 @@ fi
#
#-----------------------------------------------------------------------
#
-PREP_STEP
-eval ${RUN_CMD_NEXUS} ${EXECdir}/nexus -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc ${REDIRECT_OUT_ERR}
-export err=$?
-if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_chk
-else
- if [ $err -ne 0 ]; then
- print_err_msg_exit "Call to execute nexus standalone for the FV3LAM failed."
- fi
-fi
-POST_STEP
+export pgm="nexus"
+. prep_step
+eval ${RUN_CMD_NEXUS} ${EXECdir}/$pgm -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc >>$pgmout 2>${DATA}/errfile
+export err=$?; err_chk
+if [ $err -ne 0 ]; then
+ print_err_msg_exit "Call to execute nexus failed."
+fi
#
#-----------------------------------------------------------------------
#
@@ -341,15 +323,12 @@ POST_STEP
#
#-----------------------------------------------------------------------
#
-python3 ${ARL_NEXUS_DIR}/utils/python/make_nexus_output_pretty.py --src ${DATA}/NEXUS_Expt_split.nc --grid ${DATA}/grid_spec.nc -o ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc -t ${DATA}/HEMCO_sa_Time.rc
+${USHsrw}/nexus_utils/python/make_nexus_output_pretty.py --src ${DATA}/NEXUS_Expt_split.nc --grid ${DATA}/grid_spec.nc -o ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc -t ${DATA}/HEMCO_sa_Time.rc
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to python script \"make_nexus_output_pretty.py\" failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "wcoss2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
#
#-----------------------------------------------------------------------
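The date arithmetic in this script moves from GNU date ($DATE_UTIL --utc --date "... + N hours") to the prod_util NDATE utility, which shifts a ten-digit YYYYMMDDHH string by a signed number of hours. A small sketch of the equivalence (hypothetical values; assumes GNU date is available and the prod_util module exports NDATE):

    YYYYMMDD=20230801; HH=12; FCST_LEN_HRS=72
    # prod_util form used in the patch
    end_date=$($NDATE +${FCST_LEN_HRS} ${YYYYMMDD}${HH})                                            # 2023080412
    # GNU date form used previously
    end_date_gnu=$(date --utc --date "${YYYYMMDD} ${HH} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H")   # 2023080412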
diff --git a/scripts/exregional_nexus_gfs_sfc.sh b/scripts/exsrw_nexus_gfs_sfc.sh
similarity index 68%
rename from scripts/exregional_nexus_gfs_sfc.sh
rename to scripts/exsrw_nexus_gfs_sfc.sh
index c34d2c30ae..103842d46f 100755
--- a/scripts/exregional_nexus_gfs_sfc.sh
+++ b/scripts/exsrw_nexus_gfs_sfc.sh
@@ -7,7 +7,7 @@
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
+. ${USHsrw}/source_util_funcs.sh
source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
@@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
@@ -52,10 +52,10 @@ data files from disk or HPSS.
#
#-----------------------------------------------------------------------
#
-yyyymmdd=${GFS_SFC_CDATE:0:8}
-yyyymm=${GFS_SFC_CDATE:0:6}
-yyyy=${GFS_SFC_CDATE:0:4}
-hh=${GFS_SFC_CDATE:8:2}
+YYYYMMDD=${GFS_SFC_CDATE:0:8}
+YYYYMM=${GFS_SFC_CDATE:0:6}
+YYYY=${GFS_SFC_CDATE:0:4}
+HH=${GFS_SFC_CDATE:8:2}
if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then
cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} ))
@@ -70,8 +70,8 @@ fcst_len_hrs_offset=$(( FCST_LEN_HRS + TIME_OFFSET_HRS ))
#
#-----------------------------------------------------------------------
#
-GFS_SFC_TAR_DIR="${NEXUS_GFS_SFC_ARCHV_DIR}/rh${yyyy}/${yyyymm}/${yyyymmdd}"
-GFS_SFC_TAR_SUB_DIR="gfs.${yyyymmdd}/${hh}/atmos"
+GFS_SFC_TAR_DIR="${NEXUS_GFS_SFC_ARCHV_DIR}/rh${YYYY}/${YYYYMM}/${YYYYMMDD}"
+GFS_SFC_TAR_SUB_DIR="gfs.${YYYYMMDD}/${HH}/atmos"
if [ "${DO_REAL_TIME}" = "TRUE" ]; then
GFS_SFC_LOCAL_DIR="${COMINgfs}/${GFS_SFC_TAR_SUB_DIR}"
@@ -83,40 +83,28 @@ GFS_SFC_DATA_INTVL="3"
# copy files from local directory
if [ -d ${GFS_SFC_LOCAL_DIR} ]; then
- gfs_sfc_fn="gfs.t${hh}z.sfcanl.nc"
+ gfs_sfc_fn="gfs.t${HH}z.sfcanl.nc"
- relative_link_flag="FALSE"
gfs_sfc_fp="${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}"
- create_symlink_to_file target="${gfs_sfc_fp}" symlink="${gfs_sfc_fn}" \
- relative="${relative_link_flag}"
+ ln -sf ${gfs_sfc_fp} ${DATA_SHARE}/${gfs_sfc_fn}
for fhr in $(seq -f "%03g" 0 ${GFS_SFC_DATA_INTVL} ${fcst_len_hrs_offset}); do
- gfs_sfc_fn="gfs.t${hh}z.sfcf${fhr}.nc"
+ gfs_sfc_fn="gfs.t${HH}z.sfcf${fhr}.nc"
if [ -e "${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" ]; then
gfs_sfc_fp="${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}"
- create_symlink_to_file target="${gfs_sfc_fp}" symlink="${gfs_sfc_fn}" \
- relative="${relative_link_flag}"
+ ln -nsf ${gfs_sfc_fp} ${DATA_SHARE}/${gfs_sfc_fn}
else
message_txt="SFC file for nexus emission for \"${cycle}\" does not exist in the directory:
GFS_SFC_LOCAL_DIR = \"${GFS_SFC_LOCAL_DIR}\"
gfs_sfc_fn = \"${gfs_sfc_fn}\""
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- message_warning="WARNING: ${message_txt}"
- print_info_msg "${message_warning}"
- if [ ! -z "${maillist}" ]; then
- echo "${message_warning}" | mail.py $maillist
- fi
- else
- print_err_msg_exit "${message_txt}"
- fi
+ print_err_msg_exit "${message_txt}"
fi
- done
-
+ done
# retrieve files from HPSS
else
- if [ "${yyyymmdd}" -lt "20220627" ]; then
+ if [ "${YYYYMMDD}" -lt "20220627" ]; then
GFS_SFC_TAR_FN_VER="prod"
- elif [ "${yyyymmdd}" -lt "20221129" ]; then
+ elif [ "${YYYYMMDD}" -lt "20221129" ]; then
GFS_SFC_TAR_FN_VER="v16.2"
else
GFS_SFC_TAR_FN_VER="v16.3"
@@ -126,63 +114,51 @@ else
GFS_SFC_TAR_FN_SUFFIX_B="gfs_ncb.tar"
# Check if the sfcanl file exists in the staging directory
- gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${yyyymmdd}_${hh}.${GFS_SFC_TAR_FN_SUFFIX_A}"
+ gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${YYYYMMDD}_${HH}.${GFS_SFC_TAR_FN_SUFFIX_A}"
gfs_sfc_tar_fp="${GFS_SFC_TAR_DIR}/${gfs_sfc_tar_fn}"
- gfs_sfc_fns=("gfs.t${hh}z.sfcanl.nc")
- gfs_sfc_fps="./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcanl.nc"
+ gfs_sfc_fns=("gfs.t${HH}z.sfcanl.nc")
+ gfs_sfc_fps="./${GFS_SFC_TAR_SUB_DIR}/gfs.t${HH}z.sfcanl.nc"
if [ "${fcst_len_hrs_offset}" -lt "40" ]; then
ARCHV_LEN_HRS="${fcst_len_hrs_offset}"
else
ARCHV_LEN_HRS="39"
fi
for fhr in $(seq -f "%03g" 0 ${GFS_SFC_DATA_INTVL} ${ARCHV_LEN_HRS}); do
- gfs_sfc_fns+="gfs.t${hh}z.sfcf${fhr}.nc"
- gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcf${fhr}.nc"
+ gfs_sfc_fns+="gfs.t${HH}z.sfcf${fhr}.nc"
+ gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${HH}z.sfcf${fhr}.nc"
done
# Retrieve data from A file up to fcst_len_hrs_offset=39
htar -tvf ${gfs_sfc_tar_fp}
- PREP_STEP
- htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} ${REDIRECT_OUT_ERR}
+ htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps}
export err=$?
if [ $err -ne 0 ]; then
message_txt="htar file reading operation (\"htar -xvf ...\") failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ print_err_msg_exit "${message_txt}"
fi
- POST_STEP
# Retrieve data from B file when fcst_len_hrs_offset>=40
if [ "${fcst_len_hrs_offset}" -ge "40" ]; then
- gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${yyyymmdd}_${hh}.${GFS_SFC_TAR_FN_SUFFIX_B}"
+ gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${YYYYMMDD}_${HH}.${GFS_SFC_TAR_FN_SUFFIX_B}"
gfs_sfc_tar_fp="${GFS_SFC_TAR_DIR}/${gfs_sfc_tar_fn}"
gfs_sfc_fns=()
gfs_sfc_fps=""
for fhr in $(seq -f "%03g" 42 ${GFS_SFC_DATA_INTVL} ${fcst_len_hrs_offset}); do
- gfs_sfc_fns+="gfs.t${hh}z.sfcf${fhr}.nc"
- gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcf${fhr}.nc"
+ gfs_sfc_fns+="gfs.t${HH}z.sfcf${fhr}.nc"
+ gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${HH}z.sfcf${fhr}.nc"
done
htar -tvf ${gfs_sfc_tar_fp}
- PREP_STEP
- htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} ${REDIRECT_OUT_ERR}
+ htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps}
export err=$?
if [ $err -ne 0 ]; then
message_txt="htar file reading operation (\"htar -xvf ...\") failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ print_err_msg_exit "${message_txt}"
fi
- POST_STEP
fi
# Link retrieved files to staging directory
- ln_vrfy -sf ${GFS_SFC_TAR_SUB_DIR}/gfs.*.nc .
+ ln -sf ${DATA}/${GFS_SFC_TAR_SUB_DIR}/gfs.*.nc ${DATA_SHARE}
+fi
-fi
#
#-----------------------------------------------------------------------
#
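In the HPSS branch above, the surface files are split across two archives: the "A" tar holds forecast hours 0 through 39 and the "B" tar (gfs_ncb.tar) holds hour 42 onward; with the 3-hourly interval, hours 40 and 41 never occur. A compact sketch of that selection logic (illustration only; it reuses the variables defined in the script above):

    for fhr in $(seq -f "%03g" 0 ${GFS_SFC_DATA_INTVL} ${fcst_len_hrs_offset}); do
      if (( 10#${fhr} <= 39 )); then   # force base 10 so leading zeros are not read as octal
        tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${YYYYMMDD}_${HH}.${GFS_SFC_TAR_FN_SUFFIX_A}"   # "A" archive: f000-f039
      else
        tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${YYYYMMDD}_${HH}.${GFS_SFC_TAR_FN_SUFFIX_B}"   # "B" archive: f042 onward
      fi
      echo "gfs.t${HH}z.sfcf${fhr}.nc <- ${GFS_SFC_TAR_DIR}/${tar_fn}"
    done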
diff --git a/scripts/exregional_nexus_post_split.sh b/scripts/exsrw_nexus_post_split.sh
similarity index 73%
rename from scripts/exregional_nexus_post_split.sh
rename to scripts/exsrw_nexus_post_split.sh
index 390e0dcce6..3b83dee523 100755
--- a/scripts/exregional_nexus_post_split.sh
+++ b/scripts/exsrw_nexus_post_split.sh
@@ -7,7 +7,7 @@
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
+. ${USHsrw}/source_util_funcs.sh
source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
@@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_F
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
@@ -42,7 +42,7 @@ print_info_msg "
Entering script: \"${scrfunc_fn}\"
In directory: \"${scrfunc_dir}\"
-This is the ex-script for the task that runs NEXUS.
+This is the ex-script for the task that runs NEXUS POST SPLIT.
========================================================================"
#
#-----------------------------------------------------------------------
@@ -53,10 +53,10 @@ This is the ex-script for the task that runs NEXUS.
#
eval ${PRE_TASK_CMDS}
-mm="${PDY:4:2}"
-dd="${PDY:6:2}"
-hh="${cyc}"
-yyyymmdd="${PDY}"
+YYYYMMDD="${PDY}"
+MM="${PDY:4:2}"
+DD="${PDY:6:2}"
+HH="${cyc}"
NUM_SPLIT_NEXUS=$( printf "%02d" ${NUM_SPLIT_NEXUS} )
@@ -65,9 +65,8 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then
CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} ))
FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]}
fi
-start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC" "+%Y%m%d%H" )
-end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H" )
-
+start_date=${YYYYMMDD}${HH}
+end_date=`$NDATE +${FCST_LEN_HRS} ${YYYYMMDD}${HH}`
#
#-----------------------------------------------------------------------
#
@@ -75,25 +74,21 @@ end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hou
#
#-----------------------------------------------------------------------
#
-cp_vrfy ${ARL_NEXUS_DIR}/config/cmaq/HEMCO_sa_Time.rc ${DATA}/HEMCO_sa_Time.rc
+cpreq ${PARMsrw}/nexus_config/cmaq/HEMCO_sa_Time.rc ${DATA}/HEMCO_sa_Time.rc
+cpreq ${FIXaqm}/nexus/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc
-cp_vrfy ${NEXUS_FIX_DIR}/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc
if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then
nspt="00"
- cp_vrfy ${COMIN}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc ${DATA}/NEXUS_Expt_combined.nc
+ cpreq ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc ${DATA}/NEXUS_Expt_combined.nc
else
- python3 ${ARL_NEXUS_DIR}/utils/python/concatenate_nexus_post_split.py "${COMIN}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" "${DATA}/NEXUS_Expt_combined.nc"
+ ${USHsrw}/nexus_utils/python/concatenate_nexus_post_split.py "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" "${DATA}/NEXUS_Expt_combined.nc"
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to python script \"concatenate_nexus_post_split.py\" failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
fi
-
#
#-----------------------------------------------------------------------
#
@@ -101,17 +96,13 @@ fi
#
#-----------------------------------------------------------------------
#
-python3 ${ARL_NEXUS_DIR}/utils/combine_ant_bio.py "${DATA}/NEXUS_Expt_combined.nc" ${DATA}/NEXUS_Expt.nc
+${USHsrw}/nexus_utils/combine_ant_bio.py "${DATA}/NEXUS_Expt_combined.nc" ${DATA}/NEXUS_Expt.nc
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to python script \"NEXUS_Expt_pretty.py\" failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
-
#
#-----------------------------------------------------------------------
#
@@ -119,7 +110,7 @@ fi
#
#-----------------------------------------------------------------------
#
-mv_vrfy ${DATA}/NEXUS_Expt.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc
+mv ${DATA}/NEXUS_Expt.nc ${COMOUT}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc
#
# Print message indicating successful completion of script.
#
diff --git a/scripts/exregional_point_source.sh b/scripts/exsrw_point_source.sh
similarity index 83%
rename from scripts/exregional_point_source.sh
rename to scripts/exsrw_point_source.sh
index aeec8f3925..7acbc946f7 100755
--- a/scripts/exregional_point_source.sh
+++ b/scripts/exsrw_point_source.sh
@@ -7,7 +7,7 @@
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
+. ${USHsrw}/source_util_funcs.sh
source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
@@ -17,7 +17,7 @@ source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_V
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
@@ -59,16 +59,15 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then
FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]}
fi
nstep=$(( FCST_LEN_HRS+1 ))
-yyyymmddhh="${PDY}${cyc}"
-
+YYYYMMDDHH="${PDY}${cyc}"
#
#-----------------------------------------------------------------------
#
-# Set the directories for CONUS/HI/AK
+# Path to the point source data files
#
#-----------------------------------------------------------------------
#
-PT_SRC_PRECOMB="${DCOMINpt_src}"
+PT_SRC_PRECOMB="${FIXemis}/${PT_SRC_SUBDIR}"
#
#-----------------------------------------------------------------------
#
@@ -76,22 +75,17 @@ PT_SRC_PRECOMB="${DCOMINpt_src}"
#
#-----------------------------------------------------------------------
#
-if [ ! -s "${DATA}/pt-${yyyymmddhh}.nc" ]; then
- python3 ${HOMEdir}/sorc/AQM-utils/python_utils/stack-pt-merge.py -s ${yyyymmddhh} -n ${nstep} -i ${PT_SRC_PRECOMB}
+if [ ! -s "${DATA}/pt-${YYYYMMDDHH}.nc" ]; then
+ ${USHsrw}/aqm_utils_python/stack-pt-merge.py -s ${YYYYMMDDHH} -n ${nstep} -i ${PT_SRC_PRECOMB}
export err=$?
if [ $err -ne 0 ]; then
message_txt="Call to python script \"stack-pt-merge.py\" failed."
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
- err_exit "${message_txt}"
- else
- print_err_msg_exit "${message_txt}"
- fi
+ err_exit "${message_txt}"
+ print_err_msg_exit "${message_txt}"
fi
fi
-
# Move to COMIN
-mv_vrfy ${DATA}/pt-${yyyymmddhh}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.PT.nc
-
+mv ${DATA}/pt-${YYYYMMDDHH}.nc ${COMOUT}/${NET}.${cycle}${dot_ensmem}.PT.nc
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_post_stat_o3.sh b/scripts/exsrw_post_stat_o3.sh
similarity index 81%
rename from scripts/exregional_post_stat_o3.sh
rename to scripts/exsrw_post_stat_o3.sh
index 94306d7336..6fa1db7f8f 100755
--- a/scripts/exregional_post_stat_o3.sh
+++ b/scripts/exsrw_post_stat_o3.sh
@@ -7,7 +7,7 @@
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
+. ${USHsrw}/source_util_funcs.sh
source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
@@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_V
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
@@ -74,7 +74,7 @@ if [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then
id_domain=793
fi
-ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc .
+ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc .
#
cat >aqm_post.ini < filesize
export XLFRTEOPTS="unit_vars=yes"
@@ -145,18 +139,18 @@ for grid in 227 196 198;do
export FORT12="filesize"
export FORT31=
export FORT51=awpaqm.${cycle}.${hr}ho3.${grid}.grib2
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_${hr}hr_o3-awpozcon.${cycle}.${grid}
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_${hr}hr_o3-awpozcon.${cycle}.${grid}
done
for var in 1ho3 8ho3;do
- cp_vrfy ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT}
- cp_vrfy ${DATA}/awpaqm.${cycle}.${var}*grib2 ${COMOUTwmo}
+ cp ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT}
+ cp ${DATA}/awpaqm.${cycle}.${var}*grib2 ${COMOUTwmo}
done
for var in awpozcon;do
- cp_vrfy ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT}
+ cp ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT}
done
else
for var in 1ho3 awpozcon;do
- cp_vrfy ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT}
+ cp ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT}
done
fi
done
@@ -166,7 +160,7 @@ done
#------------------------------------------------------------
if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
- ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc a.nc
+ ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc a.nc
export chk=1
export chk1=1
@@ -185,10 +179,10 @@ EOF1
## 06z needs b.nc to find current day output from 04Z to 06Z
if [ "${cyc}" = "06" ]; then
- if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then
- ln_vrfy -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc
+ if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then
+ ln -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc
elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then
- ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc
+ ln -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc
chk=0
else
flag_run_bicor_max=no
@@ -197,20 +191,20 @@ EOF1
if [ "${cyc}" = "12" ]; then
## 12z needs b.nc to find current day output from 04Z to 06Z
- if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then
- ln_vrfy -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc
+ if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then
+ ln -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc
elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then
- ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc
+ ln -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc
chk=0
else
flag_run_bicor_max=no
fi
## 12z needs c.nc to find current day output from 07Z to 12z
- if [ -s ${COMIN}/../06/${NET}.t06z.chem_sfc.nc ]; then
- ln_vrfy -s ${COMIN}/../06/${NET}.t06z.chem_sfc.nc c.nc
+ if [ -s ${COMIN}/06/${NET}.t06z.chem_sfc.nc ]; then
+ ln -s ${COMIN}/06/${NET}.t06z.chem_sfc.nc c.nc
elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then
- ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc
+ ln -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc
chk1=0
else
flag_run_bicor_max=no
@@ -220,13 +214,7 @@ EOF1
PREP_STEP
eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR}
export err=$?
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
err_chk
- else
- if [ $err -ne 0 ]; then
- print_err_msg_exit "Call to executable to run AQM_POST_MAXI_GRIB2 returned with nonzero exit code."
- fi
- fi
POST_STEP
# split into max_1h and max_8h files and copy to grib227
@@ -234,7 +222,6 @@ EOF1
wgrib2 aqm-maxi.${id_domain}.grib2 |grep "OZMAX8" | wgrib2 -i aqm-maxi.${id_domain}.grib2 -grib ${NET}.${cycle}.max_8hr_o3.${id_domain}.grib2
grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000"
- #export grid148="lambert:263.0000:33.0000:45.0000 239.3720:442:12000.000 21.8210:265:12000.000"
grid196="mercator:20.0000 198.4750:321:2500.000:206.1310 18.0730:255:2500.000:23.0880"
grid198="nps:210.0000:60.0000 181.4290:825:5953.000 40.5300:553:5953.000"
@@ -243,7 +230,7 @@ EOF1
wgrib2 ${NET}.${cycle}.max_8hr_o3.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.max_8hr_o3.${grid}.grib2
wgrib2 ${NET}.${cycle}.max_1hr_o3.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.max_1hr_o3.${grid}.grib2
- cp_vrfy ${DATA}/${NET}.${cycle}.max_*hr_o3.*.grib2 ${COMOUT}
+ cp ${DATA}/${NET}.${cycle}.max_*hr_o3.*.grib2 ${COMOUT}
if [ "$SENDDBN" = "TRUE" ]; then
${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_1hr_o3.${grid}.grib2
${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_8hr_o3.${grid}.grib2
@@ -257,24 +244,23 @@ EOF1
export FORT12="filesize"
export FORT31=
export FORT51=aqm-${hr}hro3-maxi.${grid}.grib2.temp
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid}
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid}
echo `ls -l aqm-${hr}hro3-maxi.${grid}.grib2.temp | awk '{print $5} '` > filesize
export XLFRTEOPTS="unit_vars=yes"
export FORT11=aqm-${hr}hro3-maxi.${grid}.grib2.temp
export FORT12="filesize"
export FORT31=
export FORT51=awpaqm.${cycle}.${hr}ho3-max.${grid}.grib2
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid}
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid}
done
- cp_vrfy awpaqm.${cycle}.*o3-max.${grid}.grib2 ${COMOUTwmo}
+ cp awpaqm.${cycle}.*o3-max.${grid}.grib2 ${COMOUTwmo}
if [ "${SENDDBN_NTC}" = "TRUE" ]; then
${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1ho3-max.${grid}.grib2
${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.8ho3-max.${grid}.grib2
fi
done
fi
-
#
#-----------------------------------------------------------------------
#
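The regrid loop above relies on bash indirect expansion: the grid specs live in grid227, grid196, and grid198, and a helper variable set to "grid${grid}" lets ${!gg} expand to the matching projection string for wgrib2. A minimal sketch of the pattern (the gg assignment and the in/out file names are assumptions; the grid strings and wgrib2 options are copied from the script):

    grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000"
    grid196="mercator:20.0000 198.4750:321:2500.000:206.1310 18.0730:255:2500.000:23.0880"
    grid198="nps:210.0000:60.0000 181.4290:825:5953.000 40.5300:553:5953.000"
    for grid in 227 196 198; do
      gg="grid${grid}"   # ${!gg} expands to the value of grid227, grid196, or grid198
      wgrib2 in.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} out.${grid}.grib2
    done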
diff --git a/scripts/exregional_post_stat_pm25.sh b/scripts/exsrw_post_stat_pm25.sh
similarity index 79%
rename from scripts/exregional_post_stat_pm25.sh
rename to scripts/exsrw_post_stat_pm25.sh
index dc054b87a3..ea7c1717c3 100755
--- a/scripts/exregional_post_stat_pm25.sh
+++ b/scripts/exsrw_post_stat_pm25.sh
@@ -7,7 +7,7 @@
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
+. ${USHsrw}/source_util_funcs.sh
source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
@@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
@@ -62,7 +62,6 @@ else
print_info_msg "$VERBOSE" "
All executables will be submitted with command \'${RUN_CMD_SERIAL}\'."
fi
-
#
#-----------------------------------------------------------------------
#
@@ -79,7 +78,7 @@ fi
# aqm_pm25_post
#---------------------------------------------------------------
-ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc .
+ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc .
cat >aqm_post.ini <> ${NET}.${cycle}.1hpm25.${id_domain}.grib2
@@ -115,7 +108,7 @@ for grid in 227 196 198; do
wgrib2 ${NET}.${cycle}.1hpm25.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.1hpm25.${grid}.grib2
done
-cp_vrfy ${DATA}/${NET}.${cycle}*pm25*.grib2 ${COMOUT}
+cp ${DATA}/${NET}.${cycle}*pm25*.grib2 ${COMOUT}
# Create AWIPS GRIB2 data for Bias-Corrected PM2.5
if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
@@ -126,7 +119,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
export FORT12="filesize"
export FORT31=
export FORT51=${NET}.${cycle}.1hpm25.${grid}.grib2.temp
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_1hpm25.${cycle}.${grid}
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_1hpm25.${cycle}.${grid}
echo `ls -l ${NET}.${cycle}.grib2_pm25.${grid}.temp | awk '{print $5} '` > filesize
export XLFRTEOPTS="unit_vars=yes"
@@ -134,16 +127,16 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
export FORT12="filesize"
export FORT31=
export FORT51=awpaqm.${cycle}.1hpm25.${grid}.grib2
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_1hpm25.${cycle}.${grid}
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_1hpm25.${cycle}.${grid}
# Post Files to COMOUTwmo
- cp_vrfy awpaqm.${cycle}.1hpm25.${grid}.grib2 ${COMOUTwmo}
+ cp awpaqm.${cycle}.1hpm25.${grid}.grib2 ${COMOUTwmo}
# Distribute Data
- if [ "${SENDDBN_NTC}" = "TRUE" ] ; then
- ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2
- ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2
- fi
+# if [ "${SENDDBN_NTC}" = "TRUE" ] ; then
+# ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2
+# ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2
+# fi
done
fi
@@ -152,7 +145,7 @@ fi
#---------------------------------------------------------------
if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then
- ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc a.nc
+ ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc a.nc
export chk=1
export chk1=1
@@ -170,10 +163,10 @@ EOF1
flag_run_bicor_max=yes
# 06z needs b.nc to find current day output from 04Z to 06Z
if [ "${cyc}" = "06" ]; then
- if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then
- ln_vrfy -sf ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc
+ if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then
+ ln -sf ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc
elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then
- ln_vrfy -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc
+ ln -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc
chk=0
else
flag_run_bicor_max=no
@@ -182,20 +175,20 @@ EOF1
if [ "${cyc}" = "12" ]; then
# 12z needs b.nc to find current day output from 04Z to 06Z
- if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then
- ln_vrfy -sf ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc
+ if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then
+ ln -sf ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc
elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then
- ln_vrfy -sf ${COMINm1}/12/${NET}.${PDYm1}.t12z.chem_sfc.nc b.nc
+ ln -sf ${COMINm1}/12/${NET}.${PDYm1}.t12z.chem_sfc.nc b.nc
chk=0
else
flag_run_bicor_max=no
fi
# 12z needs c.nc to find current day output from 07Z to 12z
- if [ -s ${COMIN}/../06/${NET}.t06z.chem_sfc.nc ]; then
- ln_vrfy -sf ${COMIN}/../06/${NET}.t06z.chem_sfc.nc c.nc
+ if [ -s ${COMIN}/06/${NET}.t06z.chem_sfc.nc ]; then
+ ln -sf ${COMIN}/06/${NET}.t06z.chem_sfc.nc c.nc
elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then
- ln_vrfy -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc
+ ln -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc
chk1=0
else
flag_run_bicor_max=no
@@ -205,13 +198,7 @@ EOF1
PREP_STEP
eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR}
export err=$?
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
err_chk
- else
- if [ $err -ne 0 ]; then
- print_err_msg_exit "Call to executable to run AQM_POST_MAXI_GRIB2 returned with nonzero exit code."
- fi
- fi
POST_STEP
wgrib2 ${NET}_pm25_24h_ave.${id_domain}.grib2 |grep "PMTF" | wgrib2 -i ${NET}_pm25_24h_ave.${id_domain}.grib2 -grib ${NET}.${cycle}.ave_24hr_pm25.${id_domain}.grib2
@@ -228,14 +215,14 @@ EOF1
wgrib2 ${NET}.${cycle}.max_1hr_pm25.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.1hpm25-max.${grid}.grib2
# Add WMO header for daily 1h PM2.5 and 24hr_ave PM2.5
- rm_vrfy -f filesize
+ rm -f filesize
echo 0 > filesize
export XLFRTEOPTS="unit_vars=yes"
export FORT11=${NET}.${cycle}.1hpm25-max.${grid}.grib2
export FORT12="filesize"
export FORT31=
export FORT51=${NET}.${cycle}.max_1hr_pm25.${grid}.grib2.temp
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid}
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid}
echo `ls -l ${NET}.${cycle}.max_1hr_pm25.${grid}.grib2.temp | awk '{print $5} '` > filesize
export XLFRTEOPTS="unit_vars=yes"
@@ -243,16 +230,16 @@ EOF1
export FORT12="filesize"
export FORT31=
export FORT51=awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid}
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid}
- rm_vrfy -f filesize
+ rm -f filesize
echo 0 > filesize
export XLFRTEOPTS="unit_vars=yes"
export FORT11=${NET}.${cycle}.24hrpm25-ave.${grid}.grib2
export FORT12="filesize"
export FORT31=
export FORT51=${NET}.${cycle}.24hrpm25-ave.${grid}.grib2.temp
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid}
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid}
echo `ls -l ${NET}.${cycle}.24hrpm25-ave.${grid}.grib2.temp | awk '{print $5} '` > filesize
export XLFRTEOPTS="unit_vars=yes"
@@ -260,20 +247,28 @@ EOF1
export FORT12="filesize"
export FORT31=
export FORT51=awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2
- tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid}
+ tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid}
- cp_vrfy ${DATA}/${NET}.${cycle}.ave_24hr_pm25*.grib2 ${COMOUT}
- cp_vrfy ${DATA}/${NET}.${cycle}.max_1hr_pm25*.grib2 ${COMOUT}
- cp_vrfy awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 ${COMOUTwmo}
- cp_vrfy awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 ${COMOUTwmo}
+ cp ${DATA}/${NET}.${cycle}.ave_24hr_pm25*.grib2 ${COMOUT}
+ cp ${DATA}/${NET}.${cycle}.max_1hr_pm25*.grib2 ${COMOUT}
+ cp awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 ${COMOUTwmo}
+ cp awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 ${COMOUTwmo}
+
+ ##############################
+ # Distribute Data
+ ##############################
+
+ if [ "${SENDDBN_NTC}" = "TRUE" ] ; then
+ ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2
+ ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2
+ fi
if [ "$SENDDBN" = "TRUE" ]; then
- ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/${NET}.${cycle}.ave_24hr_pm25.${grid}.grib2
- ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/${NET}.${cycle}.max_1hr_pm25.${grid}.grib2
+ ${DBNROOT}/bin/dbn_alert MODEL AQM_PM ${job} ${COMOUTwmo}/awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2
+ ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2
fi
done
fi
-
#
#-----------------------------------------------------------------------
#
diff --git a/scripts/exregional_pre_post_stat.sh b/scripts/exsrw_pre_post_stat.sh
similarity index 81%
rename from scripts/exregional_pre_post_stat.sh
rename to scripts/exsrw_pre_post_stat.sh
index 44f4637684..dfb4c2cf9e 100755
--- a/scripts/exregional_pre_post_stat.sh
+++ b/scripts/exsrw_pre_post_stat.sh
@@ -7,7 +7,7 @@
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
+. ${USHsrw}/source_util_funcs.sh
source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
@@ -17,7 +17,7 @@ source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP}
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
#
#-----------------------------------------------------------------------
#
@@ -63,11 +63,11 @@ ist=1
while [ "$ist" -le "${FCST_LEN_HRS}" ]; do
hst=$( printf "%03d" "${ist}" )
- rm_vrfy -f ${DATA}/tmp*nc
- rm_vrfy -f ${DATA}/${NET}.${cycle}.chem_sfc_f${hst}*nc
- rm_vrfy -f ${DATA}/${NET}.${cycle}.met_sfc_f${hst}*nc
+ rm -f ${DATA}/tmp*nc
+ rm -f ${DATA}/${NET}.${cycle}.chem_sfc_f${hst}*nc
+ rm -f ${DATA}/${NET}.${cycle}.met_sfc_f${hst}*nc
- ncks -v lat,lon,o3_ave,no_ave,no2_ave,pm25_ave -d pfull,63,63 ${COMIN}/${NET}.${cycle}.dyn.f${hst}.nc ${DATA}/tmp2a.nc
+ ncks -v lat,lon,o3_ave,no_ave,no2_ave,pm25_ave -d pfull,63,63 ${DATA_SHARE}/${NET}.${cycle}.dyn.f${hst}.nc ${DATA}/tmp2a.nc
ncks -C -O -x -v pfull ${DATA}/tmp2a.nc ${DATA}/tmp2b.nc
@@ -75,11 +75,11 @@ while [ "$ist" -le "${FCST_LEN_HRS}" ]; do
ncrename -v o3_ave,o3 -v no_ave,no -v no2_ave,no2 -v pm25_ave,PM25_TOT ${DATA}/tmp2c.nc
- mv_vrfy ${DATA}/tmp2c.nc ${DATA}/${NET}.${cycle}.chem_sfc.f${hst}.nc
+ mv ${DATA}/tmp2c.nc ${DATA}/${NET}.${cycle}.chem_sfc.f${hst}.nc
- ncks -v dswrf,hpbl,tmp2m,ugrd10m,vgrd10m,spfh2m ${COMIN}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.met_sfc.f${hst}.nc
+ ncks -v dswrf,hpbl,tmp2m,ugrd10m,vgrd10m,spfh2m ${DATA_SHARE}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.met_sfc.f${hst}.nc
- ncks -v aod ${COMIN}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.aod.f${hst}.nc
+ ncks -v aod ${DATA_SHARE}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.aod.f${hst}.nc
(( ist=ist+1 ))
done
@@ -101,7 +101,6 @@ while [ "${ist}" -le "${FCST_LEN_HRS}" ]; do
done
ncecat ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${DATA}/${NET}.${cycle}.chem_sfc.nc
-
#
#-----------------------------------------------------------------------
#
@@ -109,10 +108,10 @@ ncecat ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${DATA}/${NET}.${cycle}.chem_sfc.
#
#-----------------------------------------------------------------------
#
-mv_vrfy ${DATA}/${NET}.${cycle}.met_sfc.f*.nc ${COMIN}
-mv_vrfy ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${COMIN}
-mv_vrfy ${DATA}/${NET}.${cycle}.chem_sfc.nc ${COMIN}
-mv_vrfy ${DATA}/${NET}.${cycle}.aod.f*.nc ${COMIN}
+mv ${DATA}/${NET}.${cycle}.met_sfc.f*.nc ${COMOUT}
+mv ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${COMOUT}
+mv ${DATA}/${NET}.${cycle}.chem_sfc.nc ${COMOUT}
+mv ${DATA}/${NET}.${cycle}.aod.f*.nc ${COMOUT}
#
#-----------------------------------------------------------------------
#
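Note (illustrative, not part of the patch): a condensed sketch of the per-hour extraction the hunks above adjust. Forecast output is now read from DATA_SHARE and products land in COMOUT, while the netCDF Operators chain (ncks/ncrename/ncecat) is unchanged. The intermediate tmp files of the real script are collapsed here, so this is a sketch rather than a drop-in replacement:

    hst=$(printf "%03d" "${ist}")
    # pull the surface-level chemistry fields from the shared dyn file
    ncks -v lat,lon,o3_ave,no_ave,no2_ave,pm25_ave -d pfull,63,63 \
      ${DATA_SHARE}/${NET}.${cycle}.dyn.f${hst}.nc ${DATA}/tmp2a.nc
    # drop the pfull coordinate, rename to product variable names, keep a per-hour file
    ncks -C -O -x -v pfull ${DATA}/tmp2a.nc ${DATA}/tmp2b.nc
    ncrename -v o3_ave,o3 -v no_ave,no -v no2_ave,no2 -v pm25_ave,PM25_TOT ${DATA}/tmp2b.nc
    mv ${DATA}/tmp2b.nc ${DATA}/${NET}.${cycle}.chem_sfc.f${hst}.nc
    # after the hourly loop: concatenate and publish to COMOUT
    ncecat ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${DATA}/${NET}.${cycle}.chem_sfc.nc
    mv ${DATA}/${NET}.${cycle}.chem_sfc*.nc ${COMOUT}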
diff --git a/tests/WE2E/machine_suites/comprehensive b/tests/WE2E/machine_suites/comprehensive
index a416408056..3af6ae0db4 100644
--- a/tests/WE2E/machine_suites/comprehensive
+++ b/tests/WE2E/machine_suites/comprehensive
@@ -66,12 +66,6 @@ MET_ensemble_verification_only_vx
MET_ensemble_verification_only_vx_time_lag
MET_ensemble_verification_winter_wx
MET_verification_only_vx
-nco
-nco_ensemble
-nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
-nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16
-nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km
-nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR
pregen_grid_orog_sfc_climo
specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS
specify_template_filenames
diff --git a/tests/WE2E/machine_suites/comprehensive.cheyenne b/tests/WE2E/machine_suites/comprehensive.cheyenne
index e518e0c4cb..96792e37b0 100644
--- a/tests/WE2E/machine_suites/comprehensive.cheyenne
+++ b/tests/WE2E/machine_suites/comprehensive.cheyenne
@@ -48,12 +48,6 @@ grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16
grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot
MET_ensemble_verification_only_vx
MET_verification_only_vx
-nco
-nco_ensemble
-nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
-nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16
-nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km
-nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR
pregen_grid_orog_sfc_climo
specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS
specify_template_filenames
diff --git a/tests/WE2E/machine_suites/comprehensive.derecho b/tests/WE2E/machine_suites/comprehensive.derecho
index 1fa9d1c055..9ce8d067ac 100644
--- a/tests/WE2E/machine_suites/comprehensive.derecho
+++ b/tests/WE2E/machine_suites/comprehensive.derecho
@@ -55,12 +55,6 @@ grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot
MET_ensemble_verification_only_vx
MET_ensemble_verification_winter_wx
MET_verification_only_vx
-nco
-nco_ensemble
-nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
-nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16
-nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km
-nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR
pregen_grid_orog_sfc_climo
specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS
specify_template_filenames
diff --git a/tests/WE2E/machine_suites/comprehensive.noaacloud b/tests/WE2E/machine_suites/comprehensive.noaacloud
index f81d8c9d1a..23c0aa8456 100644
--- a/tests/WE2E/machine_suites/comprehensive.noaacloud
+++ b/tests/WE2E/machine_suites/comprehensive.noaacloud
@@ -49,12 +49,6 @@ grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot
MET_ensemble_verification_only_vx
MET_ensemble_verification_winter_wx
MET_verification_only_vx
-nco
-nco_ensemble
-nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
-nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16
-nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km
-nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR
pregen_grid_orog_sfc_climo
specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS
specify_template_filenames
diff --git a/tests/WE2E/machine_suites/comprehensive.orion b/tests/WE2E/machine_suites/comprehensive.orion
index b5b65c668b..739b4fff8e 100644
--- a/tests/WE2E/machine_suites/comprehensive.orion
+++ b/tests/WE2E/machine_suites/comprehensive.orion
@@ -55,12 +55,6 @@ grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot
MET_ensemble_verification_only_vx
MET_ensemble_verification_winter_wx
MET_verification_only_vx
-nco
-nco_ensemble
-nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
-nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16
-nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km
-nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR
pregen_grid_orog_sfc_climo
specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS
specify_template_filenames
diff --git a/tests/WE2E/machine_suites/coverage.cheyenne b/tests/WE2E/machine_suites/coverage.cheyenne
index 19bbc623c7..8f3c3ec78c 100644
--- a/tests/WE2E/machine_suites/coverage.cheyenne
+++ b/tests/WE2E/machine_suites/coverage.cheyenne
@@ -4,6 +4,5 @@ grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16
grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR
-nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16
pregen_grid_orog_sfc_climo
specify_template_filenames
diff --git a/tests/WE2E/machine_suites/coverage.derecho b/tests/WE2E/machine_suites/coverage.derecho
index 3475caebcc..c2a770672e 100644
--- a/tests/WE2E/machine_suites/coverage.derecho
+++ b/tests/WE2E/machine_suites/coverage.derecho
@@ -4,7 +4,6 @@ grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16
grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR
-nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16
pregen_grid_orog_sfc_climo
specify_template_filenames
2019_hurricane_barry
diff --git a/tests/WE2E/machine_suites/coverage.gaea b/tests/WE2E/machine_suites/coverage.gaea
index 068077464d..e6aba6ea3d 100644
--- a/tests/WE2E/machine_suites/coverage.gaea
+++ b/tests/WE2E/machine_suites/coverage.gaea
@@ -7,6 +7,4 @@ grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR
grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot
-nco_ensemble
-nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km
2020_CAPE
diff --git a/tests/WE2E/machine_suites/coverage.hera.gnu.com b/tests/WE2E/machine_suites/coverage.hera.gnu.com
index 75533b4609..4c802781f9 100644
--- a/tests/WE2E/machine_suites/coverage.hera.gnu.com
+++ b/tests/WE2E/machine_suites/coverage.hera.gnu.com
@@ -7,5 +7,4 @@ grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0
long_fcst
MET_verification_only_vx
MET_ensemble_verification_only_vx_time_lag
-nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
2019_halloween_storm
diff --git a/tests/WE2E/machine_suites/coverage.jet b/tests/WE2E/machine_suites/coverage.jet
index a01d095828..53308090b1 100644
--- a/tests/WE2E/machine_suites/coverage.jet
+++ b/tests/WE2E/machine_suites/coverage.jet
@@ -9,4 +9,3 @@ grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot
grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2
grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta
-nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR
diff --git a/tests/WE2E/machine_suites/coverage.orion b/tests/WE2E/machine_suites/coverage.orion
index dd13f27318..c698648b10 100644
--- a/tests/WE2E/machine_suites/coverage.orion
+++ b/tests/WE2E/machine_suites/coverage.orion
@@ -8,5 +8,4 @@ grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR
grid_RRFS_CONUScompact_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16
grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0
-nco
2020_CAD
diff --git a/tests/WE2E/machine_suites/fundamental b/tests/WE2E/machine_suites/fundamental
index 858a442253..09d9482c7d 100644
--- a/tests/WE2E/machine_suites/fundamental
+++ b/tests/WE2E/machine_suites/fundamental
@@ -4,8 +4,6 @@
# Test RRFS_CONUScompact_25km grid, HRRR ics, RAP lbcs, RRFS_v1beta suite
grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta
-# Test grid_RRFS_CONUS_25km in NCO mode with FV3GFS bcs (6hr time offset), FV3_GFS_v16 suite
-nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16
# Test grid_RRFS_CONUS_25km grid, FV3GFS bcs, inline post, GFS_v15p2 suite
grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2
# Test grid_RRFS_CONUS_25km grid, FV3GFS bcs, restart files, GFS_v17_p8 suite
diff --git a/tests/WE2E/run_WE2E_tests.py b/tests/WE2E/run_WE2E_tests.py
index 5e1109c1ea..5d4bd81105 100755
--- a/tests/WE2E/run_WE2E_tests.py
+++ b/tests/WE2E/run_WE2E_tests.py
@@ -96,12 +96,6 @@ def run_we2e_tests(homedir, args) -> None:
logging.debug(f'{testfilename} exists for this platform and run_envir'\
'has not been specified\n'\
'Setting run_envir = {run_envir} for all tests')
- else:
- if not run_envir:
- run_envir = 'nco'
- logging.debug(f'{testfilename} exists for this platform and run_envir has'\
- 'not been specified\n'\
- 'Setting run_envir = {run_envir} for all tests')
logging.debug(f"Reading test file: {testfilename}")
with open(testfilename, encoding="utf-8") as f:
tests_to_check = [x.rstrip() for x in f]
@@ -175,14 +169,6 @@ def run_we2e_tests(homedir, args) -> None:
test_cfg['user'].update({"ACCOUNT": args.account})
if run_envir:
test_cfg['user'].update({"RUN_ENVIR": run_envir})
- if run_envir == "nco":
- if 'nco' not in test_cfg:
- test_cfg['nco'] = dict()
- test_cfg['nco'].update({"model_ver_default": "we2e"})
- if args.opsroot:
- if 'nco' not in test_cfg:
- test_cfg['nco'] = dict()
- test_cfg['nco'].update({"OPSROOT_default": args.opsroot})
# if platform section was not in input config, initialize as empty dict
if 'platform' not in test_cfg:
test_cfg['platform'] = dict()
@@ -529,9 +515,6 @@ def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> N
help='DEPRECATED; DO NOT USE. See "launch" option.')
ap.add_argument('--cron_relaunch_intvl_mnts', type=int,
help='Overrides CRON_RELAUNCH_INTVL_MNTS for all experiments')
- ap.add_argument('--opsroot', type=str,
- help='If test is for NCO mode, sets OPSROOT_default (see config_defaults.yaml'\
- 'for more details on this variable)')
ap.add_argument('--print_test_info', action='store_true',
help='Create a "WE2E_test_info.txt" file summarizing each test prior to'\
'starting experiment')
diff --git a/tests/WE2E/test_configs/default_configs/config.nco.yaml b/tests/WE2E/test_configs/default_configs/config.nco.yaml
deleted file mode 120000
index 690636fd63..0000000000
--- a/tests/WE2E/test_configs/default_configs/config.nco.yaml
+++ /dev/null
@@ -1 +0,0 @@
-../../../../ush/config.nco.yaml
\ No newline at end of file
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml
deleted file mode 100644
index 8fc88c9b6a..0000000000
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-metadata:
- description: |-
- This test is to ensure that the workflow running in nco mode completes
- successfully on the RRFS_CONUS_13km grid using the GFS_v16 physics
- suite with ICs and LBCs derived from the FV3GFS.
-user:
- RUN_ENVIR: nco
-workflow:
- CCPP_PHYS_SUITE: FV3_GFS_v16
- PREDEF_GRID_NAME: RRFS_CONUS_13km
- DATE_FIRST_CYCL: '2019061500'
- DATE_LAST_CYCL: '2019061500'
- FCST_LEN_HRS: 6
- PREEXISTING_DIR_METHOD: rename
-rocoto:
- tasks:
- taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}'
-task_get_extrn_ics:
- USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_NAME_ICS: FV3GFS
- FV3GFS_FILE_FMT_ICS: grib2
-task_get_extrn_lbcs:
- USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_NAME_LBCS: FV3GFS
- LBC_SPEC_INTVL_HRS: 3
- FV3GFS_FILE_FMT_LBCS: grib2
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml
deleted file mode 100644
index 76c9656686..0000000000
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-metadata:
- description: |-
- This test is to ensure that the workflow running in nco mode completes
- successfully on the RRFS_CONUS_25km grid using the FV3_GFS_v16 physics
- suite with time-offset ICs/LBCs derived from the FV3GFS.
-user:
- RUN_ENVIR: nco
-workflow:
- CCPP_PHYS_SUITE: FV3_GFS_v16
- PREDEF_GRID_NAME: RRFS_CONUS_25km
- DATE_FIRST_CYCL: '2022081012'
- DATE_LAST_CYCL: '2022081012'
- FCST_LEN_HRS: 6
- PREEXISTING_DIR_METHOD: rename
-rocoto:
- tasks:
- taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}'
-task_get_extrn_ics:
- EXTRN_MDL_NAME_ICS: FV3GFS
- EXTRN_MDL_ICS_OFFSET_HRS: 6
- FV3GFS_FILE_FMT_ICS: netcdf
-task_get_extrn_lbcs:
- EXTRN_MDL_NAME_LBCS: FV3GFS
- LBC_SPEC_INTVL_HRS: 3
- EXTRN_MDL_LBCS_OFFSET_HRS: 6
- FV3GFS_FILE_FMT_LBCS: netcdf
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml
deleted file mode 100644
index 9a381857ed..0000000000
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-metadata:
- description: |-
- This test is to ensure that the workflow running in nco mode completes
- successfully on the RRFS_CONUS_3km grid using the GFS_v15_thompson_mynn_lam3km
- physics suite with ICs and LBCs derived from the FV3GFS.
-user:
- RUN_ENVIR: nco
-workflow:
- CCPP_PHYS_SUITE: FV3_GFS_v15_thompson_mynn_lam3km
- PREDEF_GRID_NAME: RRFS_CONUS_3km
- DATE_FIRST_CYCL: '2019061500'
- DATE_LAST_CYCL: '2019061500'
- FCST_LEN_HRS: 6
- PREEXISTING_DIR_METHOD: rename
-rocoto:
- tasks:
- taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}'
-task_get_extrn_ics:
- USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_NAME_ICS: FV3GFS
- FV3GFS_FILE_FMT_ICS: grib2
-task_get_extrn_lbcs:
- USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_NAME_LBCS: FV3GFS
- LBC_SPEC_INTVL_HRS: 3
- FV3GFS_FILE_FMT_LBCS: grib2
-task_run_fcst:
- USE_MERRA_CLIMO: true
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
deleted file mode 100644
index 0755e7fc4d..0000000000
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
-metadata:
- description: |-
- This test is to ensure that the workflow running in nco mode completes
- successfully on the RRFS_CONUScompact_25km grid using the HRRR physics
- suite with ICs derived from the HRRR and LBCs derived from the RAP.
-user:
- RUN_ENVIR: nco
-workflow:
- CCPP_PHYS_SUITE: FV3_HRRR
- PREDEF_GRID_NAME: RRFS_CONUScompact_25km
- DATE_FIRST_CYCL: '2020081000'
- DATE_LAST_CYCL: '2020081000'
- FCST_LEN_HRS: 6
- PREEXISTING_DIR_METHOD: rename
-rocoto:
- tasks:
- taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}'
-task_get_extrn_ics:
- EXTRN_MDL_NAME_ICS: HRRR
- USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_FILES_ICS:
- - '{yy}{jjj}{hh}00{fcst_hr:02d}00'
-task_get_extrn_lbcs:
- EXTRN_MDL_NAME_LBCS: RAP
- LBC_SPEC_INTVL_HRS: 3
- USE_USER_STAGED_EXTRN_FILES: true
- EXTRN_MDL_FILES_LBCS:
- - '{yy}{jjj}{hh}00{fcst_hr:02d}00'
-task_run_fcst:
- WRITE_DOPOST: true
diff --git a/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml b/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml
deleted file mode 100644
index c060cb7f9f..0000000000
--- a/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml
+++ /dev/null
@@ -1,34 +0,0 @@
-metadata:
- description: |-
- This test checks the capability of the workflow to run ensemble forecasts
- (i.e. DO_ENSEMBLE set to "TRUE") in nco mode (i.e. RUN_ENVIR set to
- "nco") with the number of ensemble members (NUM_ENS_MEMBERS) set to
- "2". The lack of leading zeros in this "2" should cause the ensemble
- members to be named "mem1" and "mem2" (instead of, for instance, "mem01"
- and "mem02").
- Note also that this test uses two cycle hours ("12" and "18") to test
- the capability of the workflow to run ensemble forecasts for more than
- one cycle hour in nco mode.
-user:
- RUN_ENVIR: nco
-workflow:
- CCPP_PHYS_SUITE: FV3_GFS_v15p2
- PREDEF_GRID_NAME: RRFS_CONUS_25km
- DATE_FIRST_CYCL: '2019070100'
- DATE_LAST_CYCL: '2019070212'
- INCR_CYCL_FREQ: 12
- FCST_LEN_HRS: 6
- PREEXISTING_DIR_METHOD: rename
-task_get_extrn_ics:
- EXTRN_MDL_NAME_ICS: FV3GFS
- USE_USER_STAGED_EXTRN_FILES: true
-task_get_extrn_lbcs:
- EXTRN_MDL_NAME_LBCS: FV3GFS
- LBC_SPEC_INTVL_HRS: 3
- USE_USER_STAGED_EXTRN_FILES: true
-global:
- DO_ENSEMBLE: true
- NUM_ENS_MEMBERS: 2
-rocoto:
- tasks:
- taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}'
diff --git a/tests/WE2E/test_configs/wflow_features/config.nco_inline_post.yaml b/tests/WE2E/test_configs/wflow_features/config.nco_inline_post.yaml
deleted file mode 120000
index 6ec59fe0dd..0000000000
--- a/tests/WE2E/test_configs/wflow_features/config.nco_inline_post.yaml
+++ /dev/null
@@ -1 +0,0 @@
-../grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
\ No newline at end of file
diff --git a/tests/test_python/test_generate_FV3LAM_wflow.py b/tests/test_python/test_generate_FV3LAM_wflow.py
index 9e9e9f5274..48029d21b6 100644
--- a/tests/test_python/test_generate_FV3LAM_wflow.py
+++ b/tests/test_python/test_generate_FV3LAM_wflow.py
@@ -8,12 +8,9 @@
from multiprocessing import Process
from python_utils import (
- load_config_file,
- update_dict,
cp_vrfy,
run_command,
define_macos_utilities,
- cfg_to_yaml_str,
set_env_var,
get_env_var,
)
@@ -24,7 +21,7 @@ class Testing(unittest.TestCase):
""" Class to run the tests. """
def test_generate_FV3LAM_wflow(self):
- """ Test that a community and nco sample config can successfully
+ """ Test that a sample config can successfully
lead to the creation of an experiment directory. No jobs are
submitted. """
@@ -49,30 +46,6 @@ def run_workflow(USHdir, logfile):
)
run_workflow(USHdir, logfile)
- # nco test case
- nco_test_config = load_config_file(f"{USHdir}/config.nco.yaml")
- # Since we don't have a pre-gen grid dir on a generic linux
- # platform, turn the make_* tasks on for this test.
- cfg_updates = {
- "user": {
- "MACHINE": "linux",
- },
- "rocoto": {
- "tasks": {
- "taskgroups": \
- """'{{ ["parm/wflow/prep.yaml",
- "parm/wflow/coldstart.yaml",
- "parm/wflow/post.yaml"]|include }}'"""
- },
- },
- }
- update_dict(cfg_updates, nco_test_config)
-
- with open(f"{USHdir}/config.yaml", "w", encoding="utf-8") as cfg_file:
- cfg_file.write(cfg_to_yaml_str(nco_test_config))
-
- run_workflow(USHdir, logfile)
-
def setUp(self):
define_macos_utilities()
set_env_var("DEBUG", False)
diff --git a/ush/config.aqm.community.yaml b/ush/config.aqm.community.yaml
index 7586719f2e..2f32d0eac5 100644
--- a/ush/config.aqm.community.yaml
+++ b/ush/config.aqm.community.yaml
@@ -2,7 +2,7 @@ metadata:
description: config for Online-CMAQ, AQM_NA_13km, community mode
user:
RUN_ENVIR: community
- MACHINE: [hera or wcoss2]
+ MACHINE: hera
ACCOUNT: [account name]
workflow:
USE_CRON_TO_RELAUNCH: true
diff --git a/ush/config.aqm.nco.realtime.yaml b/ush/config.aqm.nco.realtime.yaml
deleted file mode 100644
index f2299eacc9..0000000000
--- a/ush/config.aqm.nco.realtime.yaml
+++ /dev/null
@@ -1,99 +0,0 @@
-metadata:
- description: config for Online-CMAQ, AQM_NA_13km, real-time, NCO mode on WCOSS2
-user:
- RUN_ENVIR: nco
- MACHINE: wcoss2
- ACCOUNT: [account name]
-workflow:
- USE_CRON_TO_RELAUNCH: true
- CRON_RELAUNCH_INTVL_MNTS: 3
- EXPT_SUBDIR: aqm_nco_aqmna13km
- PREDEF_GRID_NAME: AQM_NA_13km
- CCPP_PHYS_SUITE: FV3_GFS_v16
- DATE_FIRST_CYCL: '2023051600'
- DATE_LAST_CYCL: '2023051618'
- INCR_CYCL_FREQ: 6
- FCST_LEN_HRS: -1
- FCST_LEN_CYCL:
- - 6
- - 72
- - 72
- - 6
- PREEXISTING_DIR_METHOD: rename
- VERBOSE: true
- DEBUG: true
- COMPILER: intel
- DIAG_TABLE_TMPL_FN: diag_table_aqm.FV3_GFS_v16
- FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16
- DO_REAL_TIME: true
- COLDSTART: false
- WARMSTART_CYCLE_DIR: /path/to/restart/dir
-nco:
- envir_default: prod
- NET_default: aqm
- model_ver_default: v7.0
- RUN_default: aqm
- OPSROOT_default: /path/to/custom/opsroot
- KEEPDATA_default: true
-rocoto:
- tasks:
- taskgroups: '{{ ["parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/aqm_post.yaml"]|include }}'
- task_get_extrn_lbcs:
- walltime: 02:00:00
- metatask_run_ensemble:
- task_run_fcst_mem#mem#:
- walltime: 04:00:00
-# task_aqm_ics_ext:
- task_aqm_lbcs:
- walltime: 01:00:00
-task_make_grid:
- GRID_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/DOMAIN_DATA/AQM_NA_13km
-task_make_orog:
- OROG_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/DOMAIN_DATA/AQM_NA_13km
-task_make_sfc_climo:
- SFC_CLIMO_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/DOMAIN_DATA/AQM_NA_13km
-task_get_extrn_ics:
- EXTRN_MDL_NAME_ICS: FV3GFS
- FV3GFS_FILE_FMT_ICS: netcdf
- EXTRN_MDL_ICS_OFFSET_HRS: 6
-task_get_extrn_lbcs:
- EXTRN_MDL_NAME_LBCS: FV3GFS
- LBC_SPEC_INTVL_HRS: 6
- FV3GFS_FILE_FMT_LBCS: netcdf
- EXTRN_MDL_LBCS_OFFSET_HRS: 6
-task_run_fcst:
- DT_ATMOS: 180
- LAYOUT_X: 50
- LAYOUT_Y: 34
- BLOCKSIZE: 16
- RESTART_INTERVAL: 6 24 42 60
- QUILTING: true
- PRINT_ESMF: false
- DO_FCST_RESTART: false
-task_run_post:
- POST_OUTPUT_DOMAIN_NAME: 793
-global:
- DO_ENSEMBLE: false
- NUM_ENS_MEMBERS: 2
- HALO_BLEND: 0
-cpl_aqm_parm:
- CPL_AQM: true
- DO_AQM_CHEM_LBCS: true
- DO_AQM_GEFS_LBCS: true
- DO_AQM_DUST: true
- DO_AQM_CANOPY: false
- DO_AQM_PRODUCT: true
- DO_AQM_SAVE_AIRNOW_HIST: false
- DO_AQM_SAVE_FIRE: false
- AQM_BIO_FILE: BEIS_RRFScmaq_C775.ncf
- AQM_DUST_FILE_PREFIX: FENGSHA_p8_10km_inputs
- AQM_DUST_FILE_SUFFIX: .nc
- AQM_CANOPY_FILE_PREFIX: gfs.t12z.geo
- AQM_CANOPY_FILE_SUFFIX: .canopy_regrid.nc
- AQM_FIRE_FILE_PREFIX: Hourly_Emissions_regrid_NA_13km
- AQM_FIRE_FILE_SUFFIX: _h72.nc
- AQM_RC_FIRE_FREQUENCY: hourly
- AQM_LBCS_FILES: am4_bndy.c793.2019.v1.nc
- NEXUS_GRID_FN: grid_spec_793.nc
- NUM_SPLIT_NEXUS: 6
-
diff --git a/ush/config.nco.yaml b/ush/config.nco.yaml
deleted file mode 100644
index afcce0ba8a..0000000000
--- a/ush/config.nco.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
-metadata:
- description: >-
- Sample nco config
-user:
- RUN_ENVIR: nco
- MACHINE: hera
- ACCOUNT: an_account
-workflow:
- USE_CRON_TO_RELAUNCH: false
- EXPT_SUBDIR: test_nco
- CCPP_PHYS_SUITE: FV3_GFS_v16
- PREDEF_GRID_NAME: RRFS_CONUS_25km
- DATE_FIRST_CYCL: '2022040700'
- DATE_LAST_CYCL: '2022040700'
- FCST_LEN_HRS: 6
- PREEXISTING_DIR_METHOD: rename
- VERBOSE: true
- COMPILER: intel
-nco:
- model_ver_default: v1.0
- RUN_default: srw_test
-task_get_extrn_ics:
- EXTRN_MDL_NAME_ICS: FV3GFS
- FV3GFS_FILE_FMT_ICS: grib2
-task_get_extrn_lbcs:
- EXTRN_MDL_NAME_LBCS: FV3GFS
- LBC_SPEC_INTVL_HRS: 3
- FV3GFS_FILE_FMT_LBCS: grib2
-task_run_fcst:
- WRITE_DOPOST: true
- QUILTING: true
-task_plot_allvars:
- COMOUT_REF: ""
-task_run_post:
- POST_OUTPUT_DOMAIN_NAME: conus_25km
-rocoto:
- tasks:
- taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}'
- metatask_run_ensemble:
- task_run_fcst_mem#mem#:
- walltime: 01:00:00
diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml
index b35b6108c7..6e7823c5d2 100644
--- a/ush/config_defaults.yaml
+++ b/ush/config_defaults.yaml
@@ -1116,31 +1116,8 @@ nco:
# Name of model run (third level of com directory structure).
# In general, same as ${NET_default}.
#
- # OPSROOT_default:
- # The operations root directory in NCO mode.
- #
- # COMROOT_default:
- # The com root directory for input/output data that is located on
- # the current system.
- #
- # DATAROOT_default:
- # Directory containing the (temporary) working directory for running
- # jobs.
- #
- # DCOMROOT_default:
- # dcom root directory, which contains input/incoming data that is
- # retrieved from outside WCOSS.
- #
- # LOGBASEDIR_default:
- # Directory in which the log files from the workflow tasks will be placed.
- #
- # COMIN_BASEDIR:
- # com directory for current model's input data, typically
- # $COMROOT/$NET/$model_ver/$RUN.$PDY
- #
- # COMOUT_BASEDIR:
- # com directory for current model's output data, typically
- # $COMROOT/$NET/$model_ver/$RUN.$PDY
+ # PTMP:
+ # User-defined path to the com type directories (OPSROOT=$PTMP/$envir).
#
# DBNROOT_default:
# Root directory for the data-alerting utilities.
@@ -1174,26 +1151,20 @@ nco:
#
#-----------------------------------------------------------------------
#
- envir_default: "para"
+ envir_default: "test"
NET_default: "srw"
RUN_default: "srw"
model_ver_default: "v1.0.0"
- OPSROOT_default: '{{ workflow.EXPT_BASEDIR }}/../nco_dirs'
- COMROOT_default: '{{ OPSROOT_default }}/com'
- DATAROOT_default: '{{ OPSROOT_default }}/tmp'
- DCOMROOT_default: '{{ OPSROOT_default }}/dcom'
- LOGBASEDIR_default: '{% if user.RUN_ENVIR == "nco" %}{{ [OPSROOT_default, "output"]|path_join }}{% else %}{{ [workflow.EXPTDIR, "log"]|path_join }}{% endif %}'
- COMIN_BASEDIR: '{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}'
- COMOUT_BASEDIR: '{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}'
+ PTMP: '{{ workflow.EXPT_BASEDIR }}/../nco_dirs'
DBNROOT_default: ""
- SENDECF_default: false
- SENDDBN_default: false
- SENDDBN_NTC_default: false
- SENDCOM_default: false
- SENDWEB_default: false
- KEEPDATA_default: true
+ SENDECF_default: "NO"
+ SENDDBN_default: "NO"
+ SENDDBN_NTC_default: "NO"
+ SENDCOM_default: "YES"
+ SENDWEB_default: "NO"
+ KEEPDATA_default: "YES"
MAILTO_default: ""
MAILCC_default: ""
@@ -2173,6 +2144,10 @@ task_nexus_emission:
# PPN_NEXUS_EMISSION:
# Processes per node for the nexus_emission_* tasks.
#
+ # NNODES_NEXUS_EMISSION:
+ # The number of nodes to request from the job scheduler
+ # for the nexus_emission_* tasks.
+ #
# KMP_AFFINITY_NEXUS_EMISSION:
# Intel Thread Affinity Interface for the nexus_emission_* tasks.
#
@@ -2183,10 +2158,22 @@ task_nexus_emission:
# Controls the size of the stack for threads created by the OpenMP implementation.
#-------------------------------------------------------------------------------
PPN_NEXUS_EMISSION: '{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_NEXUS_EMISSION }}'
+ NNODES_NEXUS_EMISSION: 4
KMP_AFFINITY_NEXUS_EMISSION: "scatter"
OMP_NUM_THREADS_NEXUS_EMISSION: 2
OMP_STACKSIZE_NEXUS_EMISSION: "1024m"
+#-----------------------------
+# POINT_SOURCE config parameters
+#-----------------------------
+task_point_source:
+ #-------------------------------------------------------------------------------
+ # PT_SRC_SUBDIR:
+ # Sub-directory structure of point source data under FIXemis.
+ # Full path: FIXemis/PT_SRC_SUBDIR
+ #-------------------------------------------------------------------------------
+ PT_SRC_SUBDIR: "NEI2016v1/v2023-01-PT"
+
#----------------------------
# BIAS_CORRECTION_O3 config parameters
#-----------------------------
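Note (illustrative, not part of the patch): the point-source input location is now assembled from the platform-level FIXemis directory plus this subdirectory instead of a machine-specific DCOMINpt_src path. A minimal sketch of the resolution, using the Hera FIXemis value set later in this patch; pt_src_dir is just an illustrative name for the assembled path:

    FIXemis=/scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus   # ush/machine/hera.yaml
    PT_SRC_SUBDIR=NEI2016v1/v2023-01-PT                                # task_point_source default above
    pt_src_dir=${FIXemis}/${PT_SRC_SUBDIR}   # same location the removed DCOMINpt_src_default pointed to on Hera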
@@ -2574,41 +2561,15 @@ cpl_aqm_parm:
#
# DO_AQM_SAVE_FIRE:
# Archive fire emission file to HPSS
- #
- # DCOMINbio_default:
- # Path to the directory containing AQM bio files
#
- # DCOMINdust_default:
- # Path to the directory containing AQM dust file
+ # COMINairnow_default:
+ # Path to the directory containing AIRNOW observation data
#
- # DCOMINcanopy_default:
- # Path to the directory containing AQM canopy files
- #
- # DCOMINfire_default:
+ # COMINfire_default:
# Path to the directory containing AQM fire files
#
- # DCOMINchem_lbcs_default:
- # Path to the directory containing chemical LBC files
- #
- # DCOMINgefs_default:
+ # COMINgefs_default:
# Path to the directory containing GEFS aerosol LBC files
- #
- # DCOMINpt_src_default:
- # Parent directory containing point source files
- #
- # DCOMINairnow_default:
- # Path to the directory containing AIRNOW observation data
- #
- # COMINbicor:
- # Path of reading in historical training data for biascorrection
- #
- # COMOUTbicor:
- # Path to save the current cycle's model output and AirNow obs as
- # training data for future use. $COMINbicor and $COMOUTbicor can be
- # distinguished by the ${yyyy}${mm}${dd} under the same location
- #
- # AQM_CONFIG_DIR:
- # Configuration directory for AQM
#
# AQM_BIO_FILE:
# File name of AQM BIO file
@@ -2634,9 +2595,6 @@ cpl_aqm_parm:
# AQM_FIRE_FILE_OFFSET_HRS:
# Time offset when retrieving fire emission data files.
#
- # AQM_FIRE_ARCHV_DIR:
- # Path to the archive directory for RAVE emission files on HPSS
- #
# AQM_RC_FIRE_FREQUENCY:
# Fire frequency in aqm.rc
#
@@ -2655,13 +2613,6 @@ cpl_aqm_parm:
# AQM_GEFS_FILE_CYC:
# Cycle of the GEFS aerosol LBC files only if it is fixed
#
- # NEXUS_INPUT_DIR:
- # Same as GRID_DIR but for the the air quality emission generation task.
- # Should be blank for the default value specified in setup.sh
- #
- # NEXUS_FIX_DIR:
- # Directory containing grid_spec files as the input file of nexus
- #
# NEXUS_GRID_FN:
# File name of the input grid_spec file of nexus
#
@@ -2690,18 +2641,10 @@ cpl_aqm_parm:
DO_AQM_SAVE_AIRNOW_HIST: false
DO_AQM_SAVE_FIRE: false
- DCOMINbio_default: ""
- DCOMINdust_default: "/path/to/dust/dir"
- DCOMINcanopy_default: "/path/to/canopy/dir"
- DCOMINfire_default: ""
- DCOMINchem_lbcs_default: ""
- DCOMINgefs_default: ""
- DCOMINpt_src_default: "/path/to/point/source/base/directory"
- DCOMINairnow_default: "/path/to/airnow/obaservation/data"
- COMINbicor: "/path/to/historical/airnow/data/dir"
- COMOUTbicor: "/path/to/historical/airnow/data/dir"
+ COMINairnow_default: "/path/to/airnow/observation/data"
+ COMINfire_default: ""
+ COMINgefs_default: ""
- AQM_CONFIG_DIR: ""
AQM_BIO_FILE: "BEIS_SARC401.ncf"
AQM_DUST_FILE_PREFIX: "FENGSHA_p8_10km_inputs"
@@ -2713,7 +2656,6 @@ cpl_aqm_parm:
AQM_FIRE_FILE_PREFIX: "GBBEPx_C401GRID.emissions_v003"
AQM_FIRE_FILE_SUFFIX: ".nc"
AQM_FIRE_FILE_OFFSET_HRS: 0
- AQM_FIRE_ARCHV_DIR: "/path/to/archive/dir/for/RAVE/on/HPSS"
AQM_RC_FIRE_FREQUENCY: "static"
AQM_RC_PRODUCT_FN: "aqm.prod.nc"
@@ -2724,8 +2666,6 @@ cpl_aqm_parm:
AQM_GEFS_FILE_PREFIX: "geaer"
AQM_GEFS_FILE_CYC: ""
- NEXUS_INPUT_DIR: ""
- NEXUS_FIX_DIR: ""
NEXUS_GRID_FN: "grid_spec_GSD_HRRR_25km.nc"
NUM_SPLIT_NEXUS: 3
NEXUS_GFS_SFC_OFFSET_HRS: 0
diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py
index 5608e4cbf2..726e8eb0f3 100644
--- a/ush/create_aqm_rc_file.py
+++ b/ush/create_aqm_rc_file.py
@@ -61,25 +61,23 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations):
#
# Set parameters in the aqm.rc file.
#
- aqm_rc_bio_file_fp=os.path.join(DCOMINbio, AQM_BIO_FILE)
+ aqm_rc_bio_file_fp=os.path.join(FIXaqm, "bio", AQM_BIO_FILE)
# Fire config
aqm_rc_fire_file_fp=os.path.join(
COMIN,
- "FIRE_EMISSION",
f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}"
)
# Dust config
aqm_rc_dust_file_fp=os.path.join(
- DCOMINdust,
+ FIXaqm, "dust",
f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}",
)
# Canopy config
aqm_rc_canopy_file_fp=os.path.join(
- DCOMINcanopy,
- PREDEF_GRID_NAME,
+ FIXaqm, "canopy", PREDEF_GRID_NAME,
f"{AQM_CANOPY_FILE_PREFIX}.{mm}{AQM_CANOPY_FILE_SUFFIX}",
)
#
@@ -96,10 +94,9 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations):
"do_aqm_canopy": DO_AQM_CANOPY,
"do_aqm_product": DO_AQM_PRODUCT,
"ccpp_phys_suite": CCPP_PHYS_SUITE,
- "aqm_config_dir": AQM_CONFIG_DIR,
"init_concentrations": init_concentrations,
"aqm_rc_bio_file_fp": aqm_rc_bio_file_fp,
- "dcominbio": DCOMINbio,
+ "fixaqm": FIXaqm,
"aqm_rc_fire_file_fp": aqm_rc_fire_file_fp,
"aqm_rc_fire_frequency": AQM_RC_FIRE_FREQUENCY,
"aqm_rc_dust_file_fp": aqm_rc_dust_file_fp,
diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh
index e243f31b37..e9c3683c40 100644
--- a/ush/job_preamble.sh
+++ b/ush/job_preamble.sh
@@ -25,10 +25,10 @@ export envir="${envir:-${envir_default}}"
export NET="${NET:-${NET_default}}"
export RUN="${RUN:-${RUN_default}}"
export model_ver="${model_ver:-${model_ver_default}}"
-export COMROOT="${COMROOT:-${COMROOT_default}}"
-export DATAROOT="${DATAROOT:-${DATAROOT_default}}"
-export DCOMROOT="${DCOMROOT:-${DCOMROOT_default}}"
-export LOGBASEDIR="${LOGBASEDIR:-${LOGBASEDIR_default}}"
+export COMROOT="${COMROOT:-${PTMP}/${envir}/com}"
+export DATAROOT="${DATAROOT:-${PTMP}/${envir}/tmp}"
+export DCOMROOT="${DCOMROOT:-${PTMP}/${envir}/dcom}"
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
export DBNROOT="${DBNROOT:-${DBNROOT_default}}"
export SENDECF="${SENDECF:-${SENDECF_default}}"
@@ -41,49 +41,25 @@ export MAILTO="${MAILTO:-${MAILTO_default}}"
export MAILCC="${MAILCC:-${MAILCC_default}}"
if [ "${RUN_ENVIR}" = "nco" ]; then
+ [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
if [ "${MACHINE}" = "WCOSS2" ]; then
- [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}"
export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}"
export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}"
export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}"
export COMINgefs="${COMINgefs:-$(compath.py ${envir}/gefs/${gefs_ver})}"
else
- export COMIN="${COMIN_BASEDIR}/${RUN}.${PDY}/${cyc}"
- export COMOUT="${COMOUT_BASEDIR}/${RUN}.${PDY}/${cyc}"
- export COMINm1="${COMIN_BASEDIR}/${RUN}.${PDYm1}"
+ export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}"
+ export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}"
+ export COMINm1="${COMINm1:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDYm1}/${cyc}}"
fi
else
- export COMIN="${COMIN_BASEDIR}/${PDY}${cyc}"
- export COMOUT="${COMOUT_BASEDIR}/${PDY}${cyc}"
- export COMINm1="${COMIN_BASEDIR}/${RUN}.${PDYm1}"
+ export COMIN="${EXPTDIR}/${PDY}${cyc}"
+ export COMOUT="${EXPTDIR}/${PDY}${cyc}"
+ export COMINm1="${EXPTDIR}/${PDYm1}${cyc}"
fi
export COMOUTwmo="${COMOUTwmo:-${COMOUT}/wmo}"
-export DCOMINbio="${DCOMINbio:-${DCOMINbio_default}}"
-export DCOMINdust="${DCOMINdust:-${DCOMINdust_default}}"
-export DCOMINcanopy="${DCOMINcanopy:-${DCOMINcanopy_default}}"
-export DCOMINfire="${DCOMINfire:-${DCOMINfire_default}}"
-export DCOMINchem_lbcs="${DCOMINchem_lbcs:-${DCOMINchem_lbcs_default}}"
-export DCOMINgefs="${DCOMINgefs:-${DCOMINgefs_default}}"
-export DCOMINpt_src="${DCOMINpt_src:-${DCOMINpt_src_default}}"
-export DCOMINairnow="${DCOMINairnow:-${DCOMINairnow_default}}"
-
-#
-#-----------------------------------------------------------------------
-#
-# Change YES/NO (NCO standards; job card) to TRUE/FALSE (workflow standards)
-# for NCO environment variables
-#
-#-----------------------------------------------------------------------
-#
-export KEEPDATA=$(boolify "${KEEPDATA}")
-export SENDCOM=$(boolify "${SENDCOM}")
-export SENDDBN=$(boolify "${SENDDBN}")
-export SENDDBN_NTC=$(boolify "${SENDDBN_NTC}")
-export SENDECF=$(boolify "${SENDECF}")
-export SENDWEB=$(boolify "${SENDWEB}")
-
#
#-----------------------------------------------------------------------
#
@@ -91,11 +67,12 @@ export SENDWEB=$(boolify "${SENDWEB}")
#
#-----------------------------------------------------------------------
#
-if [ $subcyc -eq 0 ]; then
- export cycle="t${cyc}z"
+if [ ${subcyc} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
else
- export cycle="t${cyc}${subcyc}z"
+ export cycle="t${cyc}z"
fi
+
if [ "${RUN_ENVIR}" = "nco" ] && [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z $ENSMEM_INDX ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
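Note (illustrative, not part of the patch): a minimal sketch of what the preamble now yields for a community-mode task, plus the new shared-data location and cycle string; the date and cycle values are illustrative:

    # community mode: COM directories collapse onto the experiment directory
    PDY=20190615; cyc=00; subcyc=0
    COMIN=${EXPTDIR}/${PDY}${cyc}                 # e.g. ${EXPTDIR}/2019061500
    COMOUT=${EXPTDIR}/${PDY}${cyc}
    # data shared between the tasks of one cycle now lives under DATAROOT
    DATA_SHARE=${DATAROOT}/DATA_SHARE/${PDY}${cyc}
    # cycle string: subcyc is appended only when it is non-zero
    cycle=t${cyc}z                                # t0030z instead if subcyc=30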
diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml
index 0aadaa6d8b..4d836af317 100644
--- a/ush/machine/hera.yaml
+++ b/ush/machine/hera.yaml
@@ -19,7 +19,7 @@ platform:
RUN_CMD_SERIAL: time
RUN_CMD_UTILS: srun --export=ALL
RUN_CMD_NEXUS: srun -n ${nprocs} --export=ALL
- RUN_CMD_AQMLBC: srun --export=ALL -n ${NUMTS}
+ RUN_CMD_AQMLBC: srun --export=ALL -n ${numts}
SCHED_NATIVE_CMD: --export=NONE
SCHED_NATIVE_CMD_HPSS: -n 1 --export=NONE
PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }'
@@ -35,21 +35,14 @@ platform:
FIXorg: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/fix/fix_orog
FIXsfc: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/fix/fix_sfc_climo
FIXshp: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/NaturalEarth
+ FIXaqm: /scratch2/NAGAPE/epic/SRW-AQM_DATA/fix_aqm
+ FIXemis: /scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus
EXTRN_MDL_DATA_STORES: hpss aws nomads
cpl_aqm_parm:
- AQM_CONFIG_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/fix/aqm/epa/data
- DCOMINbio_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/fix/aqm/bio
- DCOMINdust_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/FENGSHA
- DCOMINcanopy_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/canopy
- DCOMINfire_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/RAVE_fire
- DCOMINchem_lbcs_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/LBCS/AQM_NA13km_AM4_v1
- DCOMINgefs_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/GEFS_DATA
- DCOMINpt_src_default: /scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus/NEI2016v1/v2023-01-PT
- NEXUS_INPUT_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/emissions/nexus
- NEXUS_FIX_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/nexus/fix
- NEXUS_GFS_SFC_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/GFS_DATA
- PT_SRC_BASEDIR: /scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus/NEI2016v1/v2023-01-PT
+ COMINfire_default: /scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/RAVE_fire
+ COMINgefs_default: /scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA
+ NEXUS_GFS_SFC_DIR: /scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA
rocoto:
tasks:
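Note (illustrative, not part of the patch): with the DCOMIN* variables retired, the AQM fixed inputs assembled in ush/create_aqm_rc_file.py earlier in this patch are resolved from subdirectories of FIXaqm. A minimal sketch of the layout those joins imply, assuming FIXaqm points at a staged fix directory such as the Hera path above:

    FIXaqm=/scratch2/NAGAPE/epic/SRW-AQM_DATA/fix_aqm    # e.g. ush/machine/hera.yaml
    # Paths assembled in create_aqm_rc_file.py:
    #   ${FIXaqm}/bio/${AQM_BIO_FILE}
    #   ${FIXaqm}/dust/${AQM_DUST_FILE_PREFIX}_${PREDEF_GRID_NAME}${AQM_DUST_FILE_SUFFIX}
    #   ${FIXaqm}/canopy/${PREDEF_GRID_NAME}/${AQM_CANOPY_FILE_PREFIX}.${mm}${AQM_CANOPY_FILE_SUFFIX}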
diff --git a/ush/machine/wcoss2.yaml b/ush/machine/wcoss2.yaml
index 53733959bb..b8c3625dff 100644
--- a/ush/machine/wcoss2.yaml
+++ b/ush/machine/wcoss2.yaml
@@ -41,21 +41,6 @@ data:
RAP: compath.py ${envir}/rap/${rap_ver}/rap.${PDYext}
NAM: compath.py ${envir}/nam/${nam_ver}/nam.${PDYext}
HRRR: compath.py ${envir}/hrrr/${hrrr_ver}/hrrr.${PDYext}/conus
-cpl_aqm_parm:
- AQM_CONFIG_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/aqm/epa/data
- DCOMINbio_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/aqm/bio
- DCOMINdust_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/FENGSHA
- DCOMINcanopy_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/canopy
- DCOMINfire_default: /lfs/h1/ops/dev/dcom
- DCOMINchem_lbcs_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/LBCS/AQM_NA13km_AM4_v1
- DCOMINgefs_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/GEFS_DATA
- DCOMINpt_src_default: /lfs/h2/emc/physics/noscrub/Youhua.Tang/nei2016v1-pt/v2023-01-PT
- DCOMINairnow_default: /lfs/h1/ops/prod/dcom
- COMINbicor: /lfs/h2/emc/physics/noscrub/jianping.huang/Bias_correction/aqmv7.0.81
- COMOUTbicor: /lfs/h2/emc/physics/noscrub/jianping.huang/Bias_correction/aqmv7.0.81
- NEXUS_INPUT_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/nexus_emissions
- NEXUS_FIX_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/nexus/fix
- NEXUS_GFS_SFC_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/GFS_DATA
rocoto:
tasks:
diff --git a/ush/setup.py b/ush/setup.py
index 1d574ec18c..cdea8fde0f 100644
--- a/ush/setup.py
+++ b/ush/setup.py
@@ -1161,49 +1161,15 @@ def get_location(xcs, fmt, expt_cfg):
#
# -----------------------------------------------------------------------
#
-
- # These NCO variables need to be set based on the user's specified
- # run environment. The default is set in config_defaults for nco. If
- # running in community mode, we set these paths to the experiment
- # directory.
- nco_vars = [
- "opsroot_default",
- "comroot_default",
- "dataroot_default",
- "dcomroot_default",
- "comin_basedir",
- "comout_basedir",
- ]
-
- nco_config = expt_config["nco"]
- if run_envir != "nco":
- # Put the variables in config dict.
- for nco_var in nco_vars:
- nco_config[nco_var.upper()] = exptdir
-
# Use env variables for NCO variables and create NCO directories
workflow_manager = expt_config["platform"].get("WORKFLOW_MANAGER")
if run_envir == "nco" and workflow_manager == "rocoto":
- for nco_var in nco_vars:
- envar = os.environ.get(nco_var)
- if envar is not None:
- nco_config[nco_var.upper()] = envar
-
- mkdir_vrfy(f' -p "{nco_config.get("OPSROOT_default")}"')
- mkdir_vrfy(f' -p "{nco_config.get("COMROOT_default")}"')
- mkdir_vrfy(f' -p "{nco_config.get("DATAROOT_default")}"')
- mkdir_vrfy(f' -p "{nco_config.get("DCOMROOT_default")}"')
-
# Update the rocoto string for the fcst output location if
# running an ensemble in nco mode
if global_sect["DO_ENSEMBLE"]:
rocoto_config["entities"]["FCST_DIR"] = \
- "{{ nco.DATAROOT_default }}/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H"
-
- if nco_config["DBNROOT_default"] and workflow_manager == "rocoto":
- mkdir_vrfy(f' -p "{nco_config["DBNROOT_default"]}"')
+ "{{ nco.PTMP }}/{{ nco.envir_default }}/tmp/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H"
- mkdir_vrfy(f' -p "{nco_config.get("LOGBASEDIR_default")}"')
# create experiment dir
mkdir_vrfy(f' -p "{exptdir}"')