diff --git a/.gitignore b/.gitignore
index 869c78107c..2935804ac4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,7 +36,6 @@ fix/gsi
fix/lut
fix/mom6
fix/orog
-fix/reg2grb2
fix/sfc_climo
fix/ugwd
fix/verif
@@ -99,6 +98,9 @@ parm/post/postxconfig-NT-GFS-WAFS.txt
parm/post/postxconfig-NT-GFS.txt
parm/post/postxconfig-NT-gefs-aerosol.txt
parm/post/postxconfig-NT-gefs-chem.txt
+parm/post/ocean.csv
+parm/post/ice.csv
+parm/post/ocnicepost.nml.jinja2
parm/ufs/noahmptable.tbl
parm/ufs/model_configure.IN
parm/ufs/MOM_input_*.IN
@@ -137,7 +139,6 @@ sorc/radmon_bcor.fd
sorc/radmon_time.fd
sorc/rdbfmsua.fd
sorc/recentersigp.fd
-sorc/reg2grb2.fd
sorc/supvit.fd
sorc/syndat_getjtbul.fd
sorc/syndat_maksynrc.fd
@@ -147,6 +148,7 @@ sorc/tocsbufr.fd
sorc/upp.fd
sorc/vint.fd
sorc/webtitle.fd
+sorc/ocnicepost.fd
# Ignore scripts from externals
#------------------------------
diff --git a/Jenkinsfile b/Jenkinsfile
index c591aae70f..be62a20512 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -9,7 +9,7 @@ pipeline {
options {
skipDefaultCheckout()
- buildDiscarder(logRotator(numToKeepStr: '2'))
+ parallelsAlwaysFailFast()
}
stages { // This initial stage is used to get the Machine name from the GitHub labels on the PR
@@ -24,15 +24,15 @@ pipeline {
machine = 'none'
for (label in pullRequest.labels) {
echo "Label: ${label}"
- if ((label.matches("CI-Hera-Ready"))) {
+ if ((label.matches('CI-Hera-Ready'))) {
machine = 'hera'
- } else if ((label.matches("CI-Orion-Ready"))) {
+ } else if ((label.matches('CI-Orion-Ready'))) {
machine = 'orion'
- } else if ((label.matches("CI-Hercules-Ready"))) {
+ } else if ((label.matches('CI-Hercules-Ready'))) {
machine = 'hercules'
}
- } // createing a second machine varible with first letter capital
- // because the first letter of the machine name is captitalized in the GitHub labels
+ } // creating a second variable, Machine, with the first letter capitalized
+ // because the machine name is capitalized in the GitHub labels
Machine = machine[0].toUpperCase() + machine.substring(1)
}
}
@@ -42,12 +42,12 @@ pipeline {
agent { label "${machine}-emc" }
steps {
script {
- properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in','Hera-EMC','Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])])
+ properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])])
HOME = "${WORKSPACE}/TESTDIR"
commonworkspace = "${WORKSPACE}"
- sh( script: "mkdir -p ${HOME}/RUNTESTS", returnStatus: true)
+ sh(script: "mkdir -p ${HOME}/RUNTESTS")
pullRequest.addLabel("CI-${Machine}-Building")
- if ( pullRequest.labels.any{ value -> value.matches("CI-${Machine}-Ready") } ) {
+ if (pullRequest.labels.any { value -> value.matches("CI-${Machine}-Ready") }) {
pullRequest.removeLabel("CI-${Machine}-Ready")
}
}
@@ -61,40 +61,46 @@ pipeline {
// throttle(['global_matrix_build'])
//}
axes {
- axis {
- name "system"
- values "gfs", "gefs"
+ axis {
+ name 'system'
+ values 'gfs', 'gefs'
}
}
stages {
- stage("build system") {
+ stage('build system') {
steps {
script {
def HOMEgfs = "${HOME}/${system}" // local HOMEgfs is used to build the system on per system basis under the common workspace HOME
- sh( script: "mkdir -p ${HOMEgfs}", returnStatus: true)
+ sh(script: "mkdir -p ${HOMEgfs}")
ws(HOMEgfs) {
env.MACHINE_ID = machine // MACHINE_ID is used in the build scripts to determine the machine and is added to the shell environment
if (fileExists("${HOMEgfs}/sorc/BUILT_semaphor")) { // if the system is already built, skip the build in the case of re-runs
- sh( script: "cat ${HOMEgfs}/sorc/BUILT_semaphor", returnStdout: true).trim() // TODO: and user configurable control to manage build semphore
- ws(commonworkspace) { pullRequest.comment("Cloned PR already built (or build skipped) on ${machine} in directory ${HOMEgfs}") }
+ sh(script: "cat ${HOMEgfs}/sorc/BUILT_semaphor", returnStdout: true).trim() // TODO: and user configurable control to manage build semphore
+ pullRequest.comment("Cloned PR already built (or build skipped) on ${machine} in directory ${HOMEgfs}
Still doing a checkout to get the latest changes")
+ sh(script: 'source workflow/gw_setup.sh; git pull --recurse-submodules')
+ dir('sorc') {
+ sh(script: './link_workflow.sh')
+ }
} else {
checkout scm
- sh( script: "source workflow/gw_setup.sh;which git;git --version;git submodule update --init --recursive", returnStatus: true)
- def builds_file = readYaml file: "ci/cases/yamls/build.yaml"
+ sh(script: 'source workflow/gw_setup.sh;which git;git --version;git submodule update --init --recursive')
+ def builds_file = readYaml file: 'ci/cases/yamls/build.yaml'
def build_args_list = builds_file['builds']
- def build_args = build_args_list[system].join(" ").trim().replaceAll("null", "")
+ def build_args = build_args_list[system].join(' ').trim().replaceAll('null', '')
dir("${HOMEgfs}/sorc") {
- sh( script: "${build_args}", returnStatus: true)
- sh( script: "./link_workflow.sh", returnStatus: true)
- sh( script: "echo ${HOMEgfs} > BUILT_semaphor", returnStatus: true)
+ sh(script: "${build_args}")
+ sh(script: './link_workflow.sh')
+ sh(script: "echo ${HOMEgfs} > BUILT_semaphor")
}
}
- if ( pullRequest.labels.any{ value -> value.matches("CI-${Machine}-Building") } ) {
- pullRequest.removeLabel("CI-${Machine}-Building")
+ if (env.CHANGE_ID && system == 'gfs') {
+ if (pullRequest.labels.any { value -> value.matches("CI-${Machine}-Building") }) {
+ pullRequest.removeLabel("CI-${Machine}-Building")
+ }
+ pullRequest.addLabel("CI-${Machine}-Running")
}
- pullRequest.addLabel("CI-${Machine}-Running")
- }
- }
+ }
+ }
}
}
}
@@ -106,21 +112,22 @@ pipeline {
agent { label "${machine}-emc" }
axes {
axis {
- name "Case"
- values "C48_ATM", "C48_S2SWA_gefs", "C48_S2SW", "C96_atm3DVar" // TODO add dynamic list of cases from env vars (needs addtional plugins)
+ name 'Case'
+ // TODO add dynamic list of cases from env vars (needs additional plugins)
+ values 'C48_ATM', 'C48_S2SWA_gefs', 'C48_S2SW', 'C96_atm3DVar', 'C96C48_hybatmDA', 'C96_atmsnowDA' // 'C48mx500_3DVarAOWCDA'
}
}
stages {
stage('Create Experiment') {
- steps {
+ steps {
script {
- sh( script: "sed -n '/{.*}/!p' ${HOME}/gfs/ci/cases/pr/${Case}.yaml > ${HOME}/gfs/ci/cases/pr/${Case}.yaml.tmp", returnStatus: true)
+ sh(script: "sed -n '/{.*}/!p' ${HOME}/gfs/ci/cases/pr/${Case}.yaml > ${HOME}/gfs/ci/cases/pr/${Case}.yaml.tmp")
def yaml_case = readYaml file: "${HOME}/gfs/ci/cases/pr/${Case}.yaml.tmp"
system = yaml_case.experiment.system
def HOMEgfs = "${HOME}/${system}" // local HOMEgfs is used to populate the XML on per system basis
env.RUNTESTS = "${HOME}/RUNTESTS"
- sh( script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${Case}.yaml", returnStatus: true)
- }
+ sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${Case}.yaml")
+ }
}
}
stage('Run Experiments') {
@@ -128,56 +135,28 @@ pipeline {
script {
HOMEgfs = "${HOME}/gfs" // common HOMEgfs is used to launch the scripts that run the experiments
ws(HOMEgfs) {
- pslot = sh( script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${HOME}/RUNTESTS ${Case}", returnStdout: true ).trim()
- pullRequest.comment("**Running experiments: ${Case} on ${Machine}**
Built against system **${system}** in directory:
`${HOMEgfs}`
With the experiment in directory:
`${HOME}/RUNTESTS/${pslot}`")
- try {
- sh( script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${HOME} ${pslot}", returnStatus: true)
- } catch (Exception e) {
- pullRequest.comment("**FAILURE** running experiments: ${Case} on ${Machine}")
- error("Failed to run experiments ${Case} on ${Machine}")
- }
- pullRequest.comment("**SUCCESS** running experiments: ${Case} on ${Machine}")
- }
- }
- }
- post {
- always {
- script {
- ws (HOMEgfs) {
- for (label in pullRequest.labels) {
- if (label.contains("${Machine}")) {
- pullRequest.removeLabel(label)
- }
- }
- }
- }
- }
- success {
- script {
- ws (HOMEgfs) {
- pullRequest.addLabel("CI-${Machine}-Passed")
- def timestamp = new Date().format("MM dd HH:mm:ss", TimeZone.getTimeZone('America/New_York'))
- pullRequest.comment("**CI SUCCESS** ${Machine} at ${timestamp}\n\nBuilt and ran in directory `${HOME}`")
- }
- }
- }
- failure {
- script {
- ws (HOMEgfs) {
- pullRequest.addLabel("CI-${Machine}-Failed")
- def timestamp = new Date().format("MM dd HH:mm:ss", TimeZone.getTimeZone('America/New_York'))
- pullRequest.comment("**CI FAILED** ${Machine} at ${timestamp}
Built and ran in directory `${HOME}`")
- if (fileExists('${HOME}/RUNTESTS/ci.log')) {
- def fileContent = readFile '${HOME}/RUNTESTS/ci.log'
- fileContent.eachLine { line ->
- if( line.contains(".log")) {
- archiveArtifacts artifacts: "${line}", fingerprint: true
+ pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${HOME}/RUNTESTS ${Case}", returnStdout: true).trim()
+ // pullRequest.comment("**Running** experiment: ${Case} on ${Machine}
With the experiment in directory:
`${HOME}/RUNTESTS/${pslot}`")
+ err = sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${HOME} ${pslot}", returnStatus: true)
+ if (err != 0) {
+ pullRequest.comment("**FAILURE** running experiment: ${Case} on ${Machine}")
+ sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_all_batch_jobs ${HOME}/RUNTESTS")
+ ws(HOME) {
+ if (fileExists('RUNTESTS/error.logs')) {
+ def fileContent = readFile 'RUNTESTS/error.logs'
+ def lines = fileContent.readLines()
+ for (line in lines) {
+ echo "archiving: ${line}"
+ archiveArtifacts artifacts: "${line}", fingerprint: true
+ }
}
}
+ error("Failed to run experiments ${Case} on ${Machine}")
}
- }
+ // pullRequest.comment("**SUCCESS** running experiment: ${Case} on ${Machine}")
}
}
+
}
}
}
@@ -185,4 +164,35 @@ pipeline {
}
}
+ post {
+ always {
+ script {
+ if (env.CHANGE_ID) {
+ for (label in pullRequest.labels) {
+ if (label.contains("${Machine}")) {
+ pullRequest.removeLabel(label)
+ }
+ }
+ }
+ }
+ }
+ success {
+ script {
+ if (env.CHANGE_ID) {
+ pullRequest.addLabel("CI-${Machine}-Passed")
+ def timestamp = new Date().format('MM dd HH:mm:ss', TimeZone.getTimeZone('America/New_York'))
+ pullRequest.comment("**CI SUCCESS** ${Machine} at ${timestamp}\n\nBuilt and ran in directory `${HOME}`")
+ }
+ }
+ }
+ failure {
+ script {
+ if (env.CHANGE_ID) {
+ pullRequest.addLabel("CI-${Machine}-Failed")
+ def timestamp = new Date().format('MM dd HH:mm:ss', TimeZone.getTimeZone('America/New_York'))
+ pullRequest.comment("**CI FAILED** ${Machine} at ${timestamp}
Built and ran in directory `${HOME}`")
+ }
+ }
+ }
+ }
}
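
For reference, a hedged sketch of what the rewritten "Run Experiments" stage executes on the CI host. The TESTDIR path and case name are hypothetical; the two script calls are taken verbatim from the pipeline above.

```bash
# Hypothetical workspace layout; the two calls mirror the pipeline stage above.
HOME=/scratch/TESTDIR            # Jenkins sets this to ${WORKSPACE}/TESTDIR
Case=C48_ATM                     # one of the matrix axis values
pslot=$("${HOME}/gfs/ci/scripts/utils/ci_utils_wrapper.sh" get_pslot "${HOME}/RUNTESTS" "${Case}")
"${HOME}/gfs/ci/scripts/run-check_ci.sh" "${HOME}" "${pslot}"
echo "rc=$?"                     # nonzero triggers the error-log archiving branch
```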
diff --git a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml
index b972d3a445..6e9fc6d3de 100644
--- a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml
+++ b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml
@@ -17,6 +17,7 @@ arguments:
start: warm
yaml: {{ HOMEgfs }}/ci/cases/yamls/soca_gfs_defaults_ci.yaml
-skip_ci_on_hosts:
+skip_ci_on_hosts:
- orion
+ - hera
- hercules
diff --git a/ci/scripts/run-check_ci.sh b/ci/scripts/run-check_ci.sh
index f98f434462..8e1e927050 100755
--- a/ci/scripts/run-check_ci.sh
+++ b/ci/scripts/run-check_ci.sh
@@ -25,6 +25,7 @@ pslot=${2:-${pslot:-?}} # Name of the experiment being tested by this scr
# TODO: Make this configurable (for now all scripts run from gfs for CI at runtime)
HOMEgfs="${TEST_DIR}/gfs"
RUNTESTS="${TEST_DIR}/RUNTESTS"
+run_check_logfile="${RUNTESTS}/ci-run_check.log"
# Source modules and setup logging
echo "Source modules."
@@ -77,15 +78,16 @@ while true; do
{
echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true
echo "Experiment ${pslot} Terminated: *FAILED*"
- } >> "${RUNTESTS}/ci.log"
-
+ } | tee -a "${run_check_logfile}"
error_logs=$(rocotostat -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs rocotocheck -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true
{
echo "Error logs:"
echo "${error_logs}"
- } >> "${RUNTESTS}/ci.log"
- sed -i "s/\`\`\`//2g" "${RUNTESTS}/ci.log"
- sacct --format=jobid,jobname%35,WorkDir%100,stat | grep "${pslot}" | grep "${pr}\/RUNTESTS" | awk '{print $1}' | xargs scancel || true
+ } | tee -a "${run_check_logfile}"
+ # rm -f "${RUNTESTS}/error.logs"
+ for log in ${error_logs}; do
+ echo "RUNTESTS${log#*RUNTESTS}" >> "${RUNTESTS}/error.logs"
+ done
rc=1
break
fi
@@ -95,8 +97,7 @@ while true; do
echo "Experiment ${pslot} Completed at $(date)" || true
echo "with ${num_succeeded} successfully completed jobs" || true
echo "Experiment ${pslot} Completed: *SUCCESS*"
- } >> "${RUNTESTS}/ci.log"
- sed -i "s/\`\`\`//2g" "${RUNTESTS}/ci.log"
+ } | tee -a "${run_check_logfile}"
rc=0
break
fi
@@ -107,3 +108,4 @@ while true; do
done
exit "${rc}"
+
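
The `${log#*RUNTESTS}` expansion above trims everything up to and including the first `RUNTESTS` in the absolute log path, so the entries written to `error.logs` are workspace-relative and can be fed straight to `archiveArtifacts`. A minimal illustration with a made-up path:

```bash
log="/scratch1/TESTDIR/RUNTESTS/COMROOT/prtest/logs/2024010100/gfsfcst.log"  # hypothetical
echo "RUNTESTS${log#*RUNTESTS}"
# -> RUNTESTS/COMROOT/prtest/logs/2024010100/gfsfcst.log
```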
diff --git a/ci/scripts/utils/ci_utils.sh b/ci/scripts/utils/ci_utils.sh
index 6f2426c388..ce2e039307 100755
--- a/ci/scripts/utils/ci_utils.sh
+++ b/ci/scripts/utils/ci_utils.sh
@@ -102,6 +102,14 @@ function get_pslot () {
}
+function cancel_all_batch_jobs () {
+ local RUNTESTS="${1}"
+ pslot_list=$(get_pslot_list "${RUNTESTS}")
+ for pslot in ${pslot_list}; do
+ cancel_batch_jobs "${pslot}"
+ done
+}
+
function create_experiment () {
local yaml_config="${1}"
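
The new helper fans out over every experiment directory under RUNTESTS; it is what the Jenkinsfile's failure branch invokes through `ci_utils_wrapper.sh`. A usage sketch (paths assumed):

```bash
source "${HOMEgfs}/ci/scripts/utils/ci_utils.sh"   # assumes HOMEgfs is set
cancel_all_batch_jobs "/scratch/TESTDIR/RUNTESTS"  # cancels batch jobs for every pslot found
```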
diff --git a/env/HERA.env b/env/HERA.env
index 057a2313f8..e9a0ee050f 100755
--- a/env/HERA.env
+++ b/env/HERA.env
@@ -199,6 +199,13 @@ elif [[ "${step}" = "atmos_products" ]]; then
export USE_CFP="YES" # Use MPMD for downstream product generation on Hera
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+
elif [[ "${step}" = "ecen" ]]; then
nth_max=$((npe_node_max / npe_node_ecen))
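
The same four-line stanza is repeated for each platform below (with a PBS `-ppn`/`--cpu-bind` variant on WCOSS2); with the defaults set in config.resources it reduces to a single-rank launch. A worked expansion, assuming Hera's `launcher=srun` and `npe_node_max=40`:

```bash
launcher="srun"; npe_node_max=40                       # assumed Hera values
npe_node_oceanice_products=1; nth_oceanice_products=1  # defaults from config.resources below
nth_max=$((npe_node_max / npe_node_oceanice_products))             # 40 (cap, currently unused)
export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}             # 1
export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
echo "${APRUN_OCNICEPOST}"                                         # srun -n 1 --cpus-per-task=1
```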
diff --git a/env/HERCULES.env b/env/HERCULES.env
index ebfa51398b..396e587798 100755
--- a/env/HERCULES.env
+++ b/env/HERCULES.env
@@ -207,10 +207,20 @@ case ${step} in
[[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max}
export APRUN_UPP="${launcher} -n ${npe_upp} --cpus-per-task=${NTHREADS_UPP}"
;;
+
"atmos_products")
export USE_CFP="YES" # Use MPMD for downstream product generation
;;
+
+"oceanice_products")
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+;;
+
"ecen")
nth_max=$((npe_node_max / npe_node_ecen))
diff --git a/env/JET.env b/env/JET.env
index eada0b1c70..02e11950e5 100755
--- a/env/JET.env
+++ b/env/JET.env
@@ -190,6 +190,13 @@ elif [[ "${step}" = "atmos_products" ]]; then
export USE_CFP="YES" # Use MPMD for downstream product generation
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+
elif [[ "${step}" = "ecen" ]]; then
nth_max=$((npe_node_max / npe_node_ecen))
diff --git a/env/ORION.env b/env/ORION.env
index c5e94cc559..f0a97eb933 100755
--- a/env/ORION.env
+++ b/env/ORION.env
@@ -209,6 +209,13 @@ elif [[ "${step}" = "atmos_products" ]]; then
export USE_CFP="YES" # Use MPMD for downstream product generation
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+
elif [[ "${step}" = "ecen" ]]; then
nth_max=$((npe_node_max / npe_node_ecen))
diff --git a/env/S4.env b/env/S4.env
index b103e865d3..717d971c7d 100755
--- a/env/S4.env
+++ b/env/S4.env
@@ -177,6 +177,13 @@ elif [[ "${step}" = "atmos_products" ]]; then
export USE_CFP="YES" # Use MPMD for downstream product generation
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
+
elif [[ "${step}" = "ecen" ]]; then
nth_max=$((npe_node_max / npe_node_ecen))
diff --git a/env/WCOSS2.env b/env/WCOSS2.env
index 307ad71c43..bbf4de2ae3 100755
--- a/env/WCOSS2.env
+++ b/env/WCOSS2.env
@@ -193,6 +193,13 @@ elif [[ "${step}" = "atmos_products" ]]; then
export USE_CFP="YES" # Use MPMD for downstream product generation
+elif [[ "${step}" = "oceanice_products" ]]; then
+
+ nth_max=$((npe_node_max / npe_node_oceanice_products))
+
+ export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1}
+ export APRUN_OCNICEPOST="${launcher} -n 1 -ppn ${npe_node_oceanice_products} --cpu-bind depth --depth ${NTHREADS_OCNICEPOST}"
+
elif [[ "${step}" = "ecen" ]]; then
nth_max=$((npe_node_max / npe_node_ecen))
diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE
index 66f6dfa8dc..235084e631 100755
--- a/jobs/JGLOBAL_ARCHIVE
+++ b/jobs/JGLOBAL_ARCHIVE
@@ -14,16 +14,15 @@ YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_BUFR COM_ATMO
COM_ATMOS_TRACK COM_ATMOS_WMO \
COM_CHEM_HISTORY COM_CHEM_ANALYSIS\
COM_MED_RESTART \
- COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART \
+ COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART COM_ICE_GRIB \
COM_OBS COM_TOP \
- COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_RESTART COM_OCEAN_XSECT COM_OCEAN_2D COM_OCEAN_3D \
+ COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_RESTART COM_OCEAN_GRIB COM_OCEAN_NETCDF \
COM_OCEAN_ANALYSIS \
COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION \
COM_ATMOS_OZNMON COM_ATMOS_RADMON COM_ATMOS_MINMON COM_CONF
for grid in "0p25" "0p50" "1p00"; do
YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL"
- YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_GRID_TMPL"
done
###############################################################
diff --git a/jobs/JGLOBAL_OCEANICE_PRODUCTS b/jobs/JGLOBAL_OCEANICE_PRODUCTS
new file mode 100755
index 0000000000..1d8c6b42c6
--- /dev/null
+++ b/jobs/JGLOBAL_OCEANICE_PRODUCTS
@@ -0,0 +1,40 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "oceanice_products" -c "base oceanice_products"
+
+
+##############################################
+# Begin JOB SPECIFIC work
+##############################################
+
+# Construct COM variables from templates
+YMD="${PDY}" HH="${cyc}" generate_com -rx "COM_${COMPONENT^^}_HISTORY"
+YMD="${PDY}" HH="${cyc}" generate_com -rx "COM_${COMPONENT^^}_GRIB"
+YMD="${PDY}" HH="${cyc}" generate_com -rx "COM_${COMPONENT^^}_NETCDF"
+
+###############################################################
+# Run exglobal script
+"${HOMEgfs}/scripts/exglobal_oceanice_products.py"
+status=$?
+(( status != 0 )) && exit "${status}"
+
+##############################################
+# End JOB SPECIFIC work
+##############################################
+
+##############################################
+# Final processing
+##############################################
+if [[ -e "${pgmout}" ]]; then
+ cat "${pgmout}"
+fi
+
+##########################################
+# Remove the Temporary working directory
+##########################################
+cd "${DATAROOT}" || exit 1
+[[ "${KEEPDATA:-NO}" == "NO" ]] && rm -rf "${DATA}"
+
+
+exit 0
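
`COMPONENT` is expected in the environment, presumably exported per ocean/ice by the calling workflow task; the `^^` expansion upper-cases it so a single J-job serves both sets of COM templates. A quick illustration:

```bash
COMPONENT="ocean"                  # or "ice" (assumed to be set by the caller)
echo "COM_${COMPONENT^^}_HISTORY"  # COM_OCEAN_HISTORY
# generate_com then resolves COM_OCEAN_HISTORY from COM_OCEAN_HISTORY_TMPL in config.com
```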
diff --git a/jobs/rocoto/oceanice_products.sh b/jobs/rocoto/oceanice_products.sh
new file mode 100755
index 0000000000..48816fb3a1
--- /dev/null
+++ b/jobs/rocoto/oceanice_products.sh
@@ -0,0 +1,37 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+## ocean ice products driver script
+## FHRLST : forecast hour list to post-process (e.g. f000, f000_f001_f002, ...)
+###############################################################
+
+# Source FV3GFS workflow modules
+. "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
+status=$?
+if (( status != 0 )); then exit "${status}"; fi
+
+###############################################################
+# setup python path for workflow utilities and tasks
+wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
+export PYTHONPATH
+
+export job="oceanice_products"
+export jobid="${job}.$$"
+
+###############################################################
+# shellcheck disable=SC2153,SC2001
+IFS='_' read -ra fhrs <<< "${FHRLST//f}" # strip off the 'f's and convert to array
+
+#---------------------------------------------------------------
+# Execute the JJOB
+for fhr in "${fhrs[@]}"; do
+ export FORECAST_HOUR=$(( 10#${fhr} ))
+ "${HOMEgfs}/jobs/JGLOBAL_OCEANICE_PRODUCTS"
+ status=$?
+ if (( status != 0 )); then exit "${status}"; fi
+done
+
+exit 0
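
A quick trace of the FHRLST handling above, with a hypothetical three-hour group:

```bash
FHRLST="f000_f003_f006"                    # hypothetical group built by the workflow generator
IFS='_' read -ra fhrs <<< "${FHRLST//f}"   # "${FHRLST//f}" strips every 'f' -> 000_003_006
echo "${fhrs[@]}"                          # 000 003 006
echo $(( 10#${fhrs[2]} ))                  # 6 -- the 10# prefix keeps 006 from being read as octal
```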
diff --git a/jobs/rocoto/ocnpost.sh b/jobs/rocoto/ocnpost.sh
deleted file mode 100755
index 5a2dc091cf..0000000000
--- a/jobs/rocoto/ocnpost.sh
+++ /dev/null
@@ -1,119 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-###############################################################
-## CICE5/MOM6 post driver script
-## FHRGRP : forecast hour group to post-process (e.g. 0, 1, 2 ...)
-## FHRLST : forecast hourlist to be post-process (e.g. anl, f000, f000_f001_f002, ...)
-###############################################################
-
-# Source FV3GFS workflow modules
-source "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
-status=$?
-(( status != 0 )) && exit "${status}"
-
-export job="ocnpost"
-export jobid="${job}.$$"
-source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnpost" -c "base ocnpost"
-
-##############################################
-# Set variables used in the exglobal script
-##############################################
-export CDUMP=${RUN/enkf}
-
-##############################################
-# Begin JOB SPECIFIC work
-##############################################
-YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_HISTORY COM_OCEAN_2D COM_OCEAN_3D \
- COM_OCEAN_XSECT COM_ICE_HISTORY
-
-for grid in "0p50" "0p25"; do
- YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_GRID_TMPL"
-done
-
-for outdir in COM_OCEAN_2D COM_OCEAN_3D COM_OCEAN_XSECT COM_OCEAN_GRIB_0p25 COM_OCEAN_GRIB_0p50; do
- if [[ ! -d "${!outdir}" ]]; then
- mkdir -p "${!outdir}"
- fi
-done
-
-fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g')
-
-export OMP_NUM_THREADS=1
-export ENSMEM=${ENSMEM:-000}
-
-export IDATE=${PDY}${cyc}
-
-for fhr in ${fhrlst}; do
- export fhr=${fhr}
- # Ignore possible spelling error (nothing is misspelled)
- # shellcheck disable=SC2153
- VDATE=$(${NDATE} "${fhr}" "${IDATE}")
- # shellcheck disable=
- declare -x VDATE
- cd "${DATA}" || exit 2
- if (( 10#${fhr} > 0 )); then
- # TODO: This portion calls NCL scripts that are deprecated (see Issue #923)
- if [[ "${MAKE_OCN_GRIB:-YES}" == "YES" ]]; then
- export MOM6REGRID=${MOM6REGRID:-${HOMEgfs}}
- "${MOM6REGRID}/scripts/run_regrid.sh"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
-
- # Convert the netcdf files to grib2
- export executable=${MOM6REGRID}/exec/reg2grb2.x
- "${MOM6REGRID}/scripts/run_reg2grb2.sh"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
- ${NMV} "ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2" "${COM_OCEAN_GRIB_0p25}/"
- ${NMV} "ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2" "${COM_OCEAN_GRIB_0p50}/"
- fi
-
- #break up ocn netcdf into multiple files:
- if [[ -f "${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
- echo "File ${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
- else
- ncks -x -v vo,uo,so,temp \
- "${COM_OCEAN_HISTORY}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc" \
- "${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
- fi
- if [[ -f "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
- echo "File ${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
- else
- ncks -x -v Heat_PmE,LW,LwLatSens,MLD_003,MLD_0125,SSH,SSS,SST,SSU,SSV,SW,cos_rot,ePBL,evap,fprec,frazil,latent,lprec,lrunoff,sensible,sin_rot,speed,taux,tauy,wet_c,wet_u,wet_v \
- "${COM_OCEAN_HISTORY}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc" \
- "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
- fi
- if [[ -f "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
- echo "File ${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
- else
- ncks -v temp -d yh,0.0 \
- "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \
- "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
- fi
- if [[ -f "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then
- echo "File ${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists"
- else
- ncks -v uo -d yh,0.0 \
- "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \
- "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc"
- status=$?
- [[ ${status} -ne 0 ]] && exit "${status}"
- fi
- fi
-done
-
-# clean up working folder
-if [[ ${KEEPDATA:-"NO"} = "NO" ]] ; then rm -rf "${DATA}" ; fi
-###############################################################
-# Exit out cleanly
-
-
-exit 0
diff --git a/modulefiles/module_base.hera.lua b/modulefiles/module_base.hera.lua
index 976cabc54e..f88409a7a9 100644
--- a/modulefiles/module_base.hera.lua
+++ b/modulefiles/module_base.hera.lua
@@ -37,6 +37,7 @@ load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
load(pathJoin("met", (os.getenv("met_ver") or "None")))
load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
-- MET/METplus are not available for use with spack-stack, yet
--load(pathJoin("met", (os.getenv("met_ver") or "None")))
diff --git a/modulefiles/module_base.hercules.lua b/modulefiles/module_base.hercules.lua
index 1d65ac02c5..409a1d6f5a 100644
--- a/modulefiles/module_base.hercules.lua
+++ b/modulefiles/module_base.hercules.lua
@@ -39,6 +39,7 @@ load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
load(pathJoin("met", (os.getenv("met_ver") or "None")))
load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
setenv("WGRIB2","wgrib2")
setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
diff --git a/modulefiles/module_base.jet.lua b/modulefiles/module_base.jet.lua
index 3cd25a0656..e6ff3136b6 100644
--- a/modulefiles/module_base.jet.lua
+++ b/modulefiles/module_base.jet.lua
@@ -37,6 +37,7 @@ load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
load(pathJoin("met", (os.getenv("met_ver") or "None")))
load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
setenv("WGRIB2","wgrib2")
setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
diff --git a/modulefiles/module_base.orion.lua b/modulefiles/module_base.orion.lua
index 8c020fc6e2..bad652a6f9 100644
--- a/modulefiles/module_base.orion.lua
+++ b/modulefiles/module_base.orion.lua
@@ -35,6 +35,7 @@ load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
load(pathJoin("met", (os.getenv("met_ver") or "None")))
load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
-- MET/METplus are not yet supported with spack-stack
--load(pathJoin("met", (os.getenv("met_ver") or "None")))
diff --git a/modulefiles/module_base.s4.lua b/modulefiles/module_base.s4.lua
index 28d8886114..21c4a50c4c 100644
--- a/modulefiles/module_base.s4.lua
+++ b/modulefiles/module_base.s4.lua
@@ -34,6 +34,7 @@ load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
load(pathJoin("met", (os.getenv("met_ver") or "None")))
load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
setenv("WGRIB2","wgrib2")
setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn
index ff2fe3377b..051a2188c3 100644
--- a/parm/config/gefs/config.base.emc.dyn
+++ b/parm/config/gefs/config.base.emc.dyn
@@ -144,7 +144,7 @@ export DO_OCN="NO"
export DO_ICE="NO"
export DO_AERO="NO"
export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both
-export DOBNDPNT_WAVE="NO"
+export DOBNDPNT_WAVE="NO" # The GEFS buoys file does not currently have any boundary points
export FRAC_GRID=".true."
# Set operational resolution
diff --git a/parm/config/gefs/config.oceanice_products b/parm/config/gefs/config.oceanice_products
new file mode 120000
index 0000000000..f6cf9cd60b
--- /dev/null
+++ b/parm/config/gefs/config.oceanice_products
@@ -0,0 +1 @@
+../gfs/config.oceanice_products
\ No newline at end of file
diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources
index 36b70aecb8..18750d1192 100644
--- a/parm/config/gefs/config.resources
+++ b/parm/config/gefs/config.resources
@@ -19,7 +19,7 @@ if (( $# != 1 )); then
echo "tracker genesis genesis_fsu"
echo "verfozn verfrad vminmon fit2obs metp arch cleanup"
echo "eobs ediag eomg eupd ecen esfc efcs epos earc"
- echo "init_chem mom6ic ocnpost"
+ echo "init_chem mom6ic"
echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt"
echo "wavegempak waveawipsbulls waveawipsgridded"
echo "postsnd awips gempak npoess"
@@ -68,6 +68,15 @@ esac
export npe_node_max
case ${step} in
+
+ "stage_ic")
+ export wtime_stage_ic="00:15:00"
+ export npe_stage_ic=1
+ export npe_node_stage_ic=1
+ export nth_stage_ic=1
+ export is_exclusive=True
+ ;;
+
"waveinit")
export wtime_waveinit="00:10:00"
export npe_waveinit=12
@@ -77,25 +86,10 @@ case ${step} in
export memory_waveinit="2GB"
;;
- "wavepostsbs")
- export wtime_wavepostsbs="00:20:00"
- export wtime_wavepostsbs_gfs="03:00:00"
- export npe_wavepostsbs=8
- export nth_wavepostsbs=1
- export npe_node_wavepostsbs=$(( npe_node_max / nth_wavepostsbs ))
- export NTASKS=${npe_wavepostsbs}
- export memory_wavepostsbs="10GB"
- export memory_wavepostsbs_gfs="10GB"
- ;;
-
"fcst" | "efcs")
export is_exclusive=True
- if [[ "${step}" == "fcst" ]]; then
- _CDUMP_LIST=${CDUMP:-"gdas gfs"}
- elif [[ "${step}" == "efcs" ]]; then
- _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"}
- fi
+ _CDUMP_LIST=${CDUMP:-"gdas gfs"}
# During workflow creation, we need resources for all CDUMPs and CDUMP is undefined
for _CDUMP in ${_CDUMP_LIST}; do
@@ -224,11 +218,47 @@ case ${step} in
export is_exclusive=True
;;
- "stage_ic")
- export wtime_stage_ic="00:15:00"
- export npe_stage_ic=1
- export npe_node_stage_ic=1
- export nth_stage_ic=1
+ "oceanice_products")
+ export wtime_oceanice_products="00:15:00"
+ export npe_oceanice_products=1
+ export npe_node_oceanice_products=1
+ export nth_oceanice_products=1
+ export memory_oceanice_products="96GB"
+ ;;
+
+ "wavepostsbs")
+ export wtime_wavepostsbs="03:00:00"
+ export npe_wavepostsbs=1
+ export nth_wavepostsbs=1
+ export npe_node_wavepostsbs=$(( npe_node_max / nth_wavepostsbs ))
+ export NTASKS=${npe_wavepostsbs}
+ export memory_wavepostsbs="10GB"
+ ;;
+
+ "wavepostbndpnt")
+ export wtime_wavepostbndpnt="01:00:00"
+ export npe_wavepostbndpnt=240
+ export nth_wavepostbndpnt=1
+ export npe_node_wavepostbndpnt=$(( npe_node_max / nth_wavepostbndpnt ))
+ export NTASKS=${npe_wavepostbndpnt}
+ export is_exclusive=True
+ ;;
+
+ "wavepostbndpntbll")
+ export wtime_wavepostbndpntbll="01:00:00"
+ export npe_wavepostbndpntbll=448
+ export nth_wavepostbndpntbll=1
+ export npe_node_wavepostbndpntbll=$(( npe_node_max / nth_wavepostbndpntbll ))
+ export NTASKS=${npe_wavepostbndpntbll}
+ export is_exclusive=True
+ ;;
+
+ "wavepostpnt")
+ export wtime_wavepostpnt="04:00:00"
+ export npe_wavepostpnt=200
+ export nth_wavepostpnt=1
+ export npe_node_wavepostpnt=$(( npe_node_max / nth_wavepostpnt ))
+ export NTASKS=${npe_wavepostpnt}
export is_exclusive=True
;;
@@ -239,4 +269,4 @@ case ${step} in
esac
-echo "END: config.resources"
\ No newline at end of file
+echo "END: config.resources"
diff --git a/parm/config/gefs/config.wavepostbndpnt b/parm/config/gefs/config.wavepostbndpnt
new file mode 100644
index 0000000000..412c5fb42a
--- /dev/null
+++ b/parm/config/gefs/config.wavepostbndpnt
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.wavepostbndpnt ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostbndpnt"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostbndpnt
+
+echo "END: config.wavepostbndpnt"
diff --git a/parm/config/gefs/config.wavepostbndpntbll b/parm/config/gefs/config.wavepostbndpntbll
new file mode 100644
index 0000000000..6695ab0f84
--- /dev/null
+++ b/parm/config/gefs/config.wavepostbndpntbll
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.wavepostbndpntbll ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostbndpntbll"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostbndpntbll
+
+echo "END: config.wavepostbndpntbll"
diff --git a/parm/config/gefs/config.wavepostpnt b/parm/config/gefs/config.wavepostpnt
new file mode 100644
index 0000000000..e87237da82
--- /dev/null
+++ b/parm/config/gefs/config.wavepostpnt
@@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.wavepostpnt ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostpnt"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostpnt
+
+echo "END: config.wavepostpnt"
diff --git a/parm/config/gefs/config.wavepostsbs b/parm/config/gefs/config.wavepostsbs
new file mode 100644
index 0000000000..b3c5902e3c
--- /dev/null
+++ b/parm/config/gefs/config.wavepostsbs
@@ -0,0 +1,28 @@
+#! /usr/bin/env bash
+
+########## config.wavepostsbs ##########
+# Wave steps specific
+
+echo "BEGIN: config.wavepostsbs"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" wavepostsbs
+
+# Subgrid info for grib2 encoding
+export WAV_SUBGRBSRC=""
+export WAV_SUBGRB=""
+
+# Options for point output (switch on/off boundary point output)
+export DOIBP_WAV='NO' # Input boundary points
+export DOFLD_WAV='YES' # Field data
+export DOPNT_WAV='YES' # Station data
+export DOGRB_WAV='YES' # Create grib2 files
+if [[ -n "${waveinterpGRD}" ]]; then
+ export DOGRI_WAV='YES' # Create interpolated grids
+else
+ export DOGRI_WAV='NO' # Do not create interpolated grids
+fi
+export DOSPC_WAV='YES' # Spectral post
+export DOBLL_WAV='YES' # Bulletin post
+
+echo "END: config.wavepostsbs"
diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn
index 250ce03a48..79b6a6455d 100644
--- a/parm/config/gfs/config.base.emc.dyn
+++ b/parm/config/gfs/config.base.emc.dyn
@@ -360,13 +360,13 @@ export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@"
export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@"
# Analysis increments to zero in CALCINCEXEC
-export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
+export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'"
# Write analysis files for early cycle EnKF
export DO_CALC_INCREMENT_ENKF_GFS="YES"
# Stratospheric increments to zero
-export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'"
+export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'"
export INCVARS_EFOLD="5"
# Swith to generate netcdf or binary diagnostic files. If not specified,
diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com
index db648b5866..1f046fdef6 100644
--- a/parm/config/gfs/config.com
+++ b/parm/config/gfs/config.com
@@ -80,15 +80,16 @@ declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history'
declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart'
declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input'
declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean'
-declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D'
-declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D'
-declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect'
+declare -rx COM_OCEAN_NETCDF_TMPL=${COM_BASE}'/products/ocean/netcdf'
declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2'
declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}'
declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input'
declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history'
declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart'
+declare -rx COM_ICE_NETCDF_TMPL=${COM_BASE}'/products/ice/netcdf'
+declare -rx COM_ICE_GRIB_TMPL=${COM_BASE}'/products/ice/grib2'
+declare -rx COM_ICE_GRIB_GRID_TMPL=${COM_ICE_GRIB_TMPL}'/${GRID}'
declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history'
declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem'
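
The new `COM_ICE_GRIB_GRID_TMPL` follows the pattern JGLOBAL_ARCHIVE already uses for the atmos grids: the literal `${GRID}` survives in the template (note the single quotes) and is substituted by `generate_com` at run time. A sketch with hypothetical date values:

```bash
PDY=20240101; cyc=00
for grid in "0p25" "0p50"; do   # grids are illustrative
  YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_ICE_GRIB_${grid}:COM_ICE_GRIB_GRID_TMPL"
done
echo "${COM_ICE_GRIB_0p25}"     # .../products/ice/grib2/0p25 under the run's COM_BASE
```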
diff --git a/parm/config/gfs/config.oceanice_products b/parm/config/gfs/config.oceanice_products
new file mode 100644
index 0000000000..bea70c21cc
--- /dev/null
+++ b/parm/config/gfs/config.oceanice_products
@@ -0,0 +1,15 @@
+#! /usr/bin/env bash
+
+########## config.oceanice_products ##########
+
+echo "BEGIN: config.oceanice_products"
+
+# Get task specific resources
+source "${EXPDIR}/config.resources" oceanice_products
+
+export OCEANICEPRODUCTS_CONFIG="${HOMEgfs}/parm/post/oceanice_products.yaml"
+
+# No. of forecast hours to process in a single job
+export NFHRS_PER_GROUP=3
+
+echo "END: config.oceanice_products"
diff --git a/parm/config/gfs/config.ocnpost b/parm/config/gfs/config.ocnpost
deleted file mode 100644
index 851c476e6c..0000000000
--- a/parm/config/gfs/config.ocnpost
+++ /dev/null
@@ -1,29 +0,0 @@
-#! /usr/bin/env bash
-
-########## config.ocnpost ##########
-
-echo "BEGIN: config.ocnpost"
-
-# Get task specific resources
-source "${EXPDIR}/config.resources" ocnpost
-
-# Convert netcdf files to grib files using post job
-#-------------------------------------------
-case "${OCNRES}" in
- "025") export MAKE_OCN_GRIB="YES";;
- "050") export MAKE_OCN_GRIB="NO";;
- "100") export MAKE_OCN_GRIB="NO";;
- "500") export MAKE_OCN_GRIB="NO";;
- *) export MAKE_OCN_GRIB="NO";;
-esac
-
-if [[ "${machine}" = "WCOSS2" ]] || [[ "${machine}" = "HERCULES" ]]; then
- #Currently the conversion to netcdf uses NCL which is not on WCOSS2 or HERCULES
- #This should be removed when this is updated
- export MAKE_OCN_GRIB="NO"
-fi
-
-# No. of forecast hours to process in a single job
-export NFHRS_PER_GROUP=3
-
-echo "END: config.ocnpost"
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index b746a4b32a..ced6e6a3d8 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -19,7 +19,7 @@ if (( $# != 1 )); then
echo "tracker genesis genesis_fsu"
echo "verfozn verfrad vminmon fit2obs metp arch cleanup"
echo "eobs ediag eomg eupd ecen esfc efcs epos earc"
- echo "init_chem mom6ic ocnpost"
+ echo "init_chem mom6ic oceanice_products"
echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt"
echo "wavegempak waveawipsbulls waveawipsgridded"
echo "postsnd awips gempak npoess"
@@ -651,17 +651,12 @@ case ${step} in
unset NTASKS_TOT
;;
- "ocnpost")
- export wtime_ocnpost="00:30:00"
- export npe_ocnpost=1
- export npe_node_ocnpost=1
- export nth_ocnpost=1
- export memory_ocnpost="96G"
- if [[ ${machine} == "JET" ]]; then
- # JET only has 88GB of requestable memory per node
- # so a second node is required to meet the requiremtn
- npe_ocnpost=2
- fi
+ "oceanice_products")
+ export wtime_oceanice_products="00:15:00"
+ export npe_oceanice_products=1
+ export npe_node_oceanice_products=1
+ export nth_oceanice_products=1
+ export memory_oceanice_products="96GB"
;;
"upp")
@@ -671,6 +666,7 @@ case ${step} in
;;
"C192" | "C384" | "C768")
export npe_upp=120
+ export memory_upp="48GB"
;;
*)
echo "FATAL ERROR: Resources not defined for job ${job} at resolution ${CASE}"
diff --git a/parm/config/gfs/config.wavepostbndpnt b/parm/config/gfs/config.wavepostbndpnt
index dfeddc79b2..412c5fb42a 100644
--- a/parm/config/gfs/config.wavepostbndpnt
+++ b/parm/config/gfs/config.wavepostbndpnt
@@ -6,6 +6,6 @@
echo "BEGIN: config.wavepostbndpnt"
# Get task specific resources
-. $EXPDIR/config.resources wavepostbndpnt
+source "${EXPDIR}/config.resources" wavepostbndpnt
echo "END: config.wavepostbndpnt"
diff --git a/parm/config/gfs/config.wavepostbndpntbll b/parm/config/gfs/config.wavepostbndpntbll
index bb7224cc70..6695ab0f84 100644
--- a/parm/config/gfs/config.wavepostbndpntbll
+++ b/parm/config/gfs/config.wavepostbndpntbll
@@ -6,6 +6,6 @@
echo "BEGIN: config.wavepostbndpntbll"
# Get task specific resources
-. $EXPDIR/config.resources wavepostbndpntbll
+source "${EXPDIR}/config.resources" wavepostbndpntbll
echo "END: config.wavepostbndpntbll"
diff --git a/parm/config/gfs/config.wavepostpnt b/parm/config/gfs/config.wavepostpnt
index 8befb91760..e87237da82 100644
--- a/parm/config/gfs/config.wavepostpnt
+++ b/parm/config/gfs/config.wavepostpnt
@@ -6,6 +6,6 @@
echo "BEGIN: config.wavepostpnt"
# Get task specific resources
-. $EXPDIR/config.resources wavepostpnt
+source "${EXPDIR}/config.resources" wavepostpnt
echo "END: config.wavepostpnt"
diff --git a/parm/config/gfs/config.wavepostsbs b/parm/config/gfs/config.wavepostsbs
index 8e74aae069..b3c5902e3c 100644
--- a/parm/config/gfs/config.wavepostsbs
+++ b/parm/config/gfs/config.wavepostsbs
@@ -6,7 +6,7 @@
echo "BEGIN: config.wavepostsbs"
# Get task specific resources
-. $EXPDIR/config.resources wavepostsbs
+source "${EXPDIR}/config.resources" wavepostsbs
# Subgrid info for grib2 encoding
export WAV_SUBGRBSRC=""
diff --git a/parm/post/oceanice_products.yaml b/parm/post/oceanice_products.yaml
new file mode 100644
index 0000000000..44b4094c56
--- /dev/null
+++ b/parm/post/oceanice_products.yaml
@@ -0,0 +1,75 @@
+ocnicepost:
+ executable: "ocnicepost.x"
+ namelist:
+ debug: False
+ fix_data:
+ mkdir:
+ - "{{ DATA }}"
+ copy:
+ - ["{{ HOMEgfs }}/exec/ocnicepost.x", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/parm/post/ocnicepost.nml.jinja2", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/parm/post/{{ component }}.csv", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/fix/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Bu.to.Ct.bilinear.nc", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/fix/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cu.to.Ct.bilinear.nc", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/fix/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Cv.to.Ct.bilinear.nc", "{{ DATA }}/"]
+ {% for grid in product_grids %}
+ - ["{{ HOMEgfs }}/fix/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.bilinear.nc", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/fix/mom6/post/{{ model_grid }}/tripole.{{ model_grid }}.Ct.to.rect.{{ grid }}.conserve.nc", "{{ DATA }}/"]
+ - ["{{ HOMEgfs }}/fix/mom6/post/template.global.{{ grid }}.gb2", "{{ DATA }}/"]
+ {% endfor %}
+
+nc2grib2:
+ script: "{{ HOMEgfs }}/ush/oceanice_nc2grib2.sh"
+
+ocean:
+ namelist:
+ ftype: "ocean"
+ maskvar: "temp"
+ sinvar: "sin_rot"
+ cosvar: "cos_rot"
+ angvar: ""
+ {% if model_grid == 'mx025' or model_grid == 'mx050' or model_grid == 'mx100' %}
+ ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 225.86945, 241.06255, 266.5239, 308.7874, 373.9288, 467.3998, 593.87915, 757.1453, 959.97325, 1204.059, 1489.9735, 1817.1455, 2183.879, 2587.3995, 3023.9285, 3488.7875, 3976.524, 4481.0625]
+ {% elif model_grid == 'mx500' %}
+ ocean_levels: [5, 15, 25, 35, 45, 55, 65, 75, 85, 95, 105, 115, 125, 135, 145, 155, 165, 175, 185, 195, 205, 215, 225.86945, 241.06255, 266.5239]
+ {% endif %}
+ subset: ['SSH', 'SST', 'SSS', 'speed', 'MLD_003', 'latent', 'sensible', 'SW', 'LW', 'LwLatSens', 'Heat_PmE', 'SSU', 'SSV', 'taux', 'tauy', 'temp', 'so', 'uo', 'vo']
+ data_in:
+ copy:
+ - ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.6hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]
+ data_out:
+ mkdir:
+ - "{{ COM_OCEAN_NETCDF }}"
+ {% for grid in product_grids %}
+ - "{{ COM_OCEAN_GRIB }}/{{ grid }}"
+ {% endfor %}
+ copy:
+ - ["{{ DATA }}/ocean_subset.nc", "{{ COM_OCEAN_NETCDF }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
+ {% for grid in product_grids %}
+ - ["{{ DATA }}/ocean.{{ grid }}.grib2", "{{ COM_OCEAN_GRIB }}/{{ grid }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2"]
+ - ["{{ DATA }}/ocean.{{ grid }}.grib2.idx", "{{ COM_OCEAN_GRIB }}/{{ grid }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2.idx"]
+ {% endfor %}
+
+ice:
+ namelist:
+ ftype: "ice"
+ maskvar: "tmask"
+ sinvar: ""
+ cosvar: ""
+ angvar: "ANGLET"
+ subset: ['hi_h', 'hs_h', 'aice_h', 'Tsfc_h', 'uvel_h', 'vvel_h', 'frzmlt_h', 'albsni_h', 'mlt_onset_h', 'frz_onset_h']
+ data_in:
+ copy:
+ - ["{{ COM_ICE_HISTORY }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.6hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ice.nc"]
+ data_out:
+ mkdir:
+ - "{{ COM_ICE_NETCDF }}"
+ {% for grid in product_grids %}
+ - "{{ COM_ICE_GRIB }}/{{ grid }}"
+ {% endfor %}
+ copy:
+ - ["{{ DATA }}/ice_subset.nc", "{{ COM_ICE_NETCDF }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
+ {% for grid in product_grids %}
+ - ["{{ DATA }}/ice.{{ grid }}.grib2", "{{ COM_ICE_GRIB }}/{{ grid }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2"]
+ - ["{{ DATA }}/ice.{{ grid }}.grib2.idx", "{{ COM_ICE_GRIB }}/{{ grid }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.grib2.idx"]
+ {% endfor %}
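
Since this YAML is a Jinja2 template, it can be rendered outside the workflow to inspect which files a given component/grid combination will stage and copy. A hedged sketch: the `strftime` filter is normally supplied by the workflow's templating layer, so it is re-created here by hand, and every path and value is a placeholder.

```bash
python3 - <<'EOF'
from datetime import datetime
import jinja2  # assumes jinja2 is installed

env = jinja2.Environment(undefined=jinja2.StrictUndefined)
env.filters["strftime"] = lambda d, fmt: d.strftime(fmt)  # stand-in for the workflow's filter

with open("parm/post/oceanice_products.yaml") as f:
    tmpl = env.from_string(f.read())

print(tmpl.render(
    HOMEgfs="/path/HOMEgfs", DATA="/tmp/DATA", RUN="gfs",
    component="ocean", model_grid="mx025",
    product_grids=["0p25", "0p50", "1p00"],
    current_cycle=datetime(2024, 1, 1, 0), forecast_hour=6,
    COM_OCEAN_HISTORY="/com/ocean/history", COM_OCEAN_NETCDF="/com/ocean/netcdf",
    COM_OCEAN_GRIB="/com/ocean/grib2",
    COM_ICE_HISTORY="/com/ice/history", COM_ICE_NETCDF="/com/ice/netcdf",
    COM_ICE_GRIB="/com/ice/grib2",
))
EOF
```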
diff --git a/scripts/exgfs_wave_post_pnt.sh b/scripts/exgfs_wave_post_pnt.sh
index a7aa957564..c085c48f30 100755
--- a/scripts/exgfs_wave_post_pnt.sh
+++ b/scripts/exgfs_wave_post_pnt.sh
@@ -156,7 +156,11 @@ source "$HOMEgfs/ush/preamble.sh"
cp -f $PARMwave/wave_${NET}.buoys buoy.loc.temp
if [ "$DOBNDPNT_WAV" = YES ]; then
#only do boundary points
- sed -n '/^\$.*/!p' buoy.loc.temp | grep IBP > buoy.loc
+ sed -n '/^\$.*/!p' buoy.loc.temp | grep IBP > buoy.loc || {
+ echo "WARNING: No boundary points found in buoy file ${PARMwave}/wave_${NET}.buoys"
+ echo " Ending job without doing anything."
+ exit 0
+ }
else
#exclude boundary points
sed -n '/^\$.*/!p' buoy.loc.temp | grep -v IBP > buoy.loc
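
This works because `grep` exits nonzero when it matches nothing, so the `||` group converts "no boundary points in the buoy file" into a clean exit instead of a job failure. A minimal repro with a fabricated buoy file:

```bash
printf '$ comment line\n46001 buoy\n46002 buoy\n' > buoy.loc.temp   # no IBP entries
sed -n '/^\$.*/!p' buoy.loc.temp | grep IBP > buoy.loc || {
  echo "WARNING: no boundary points; job would exit 0 here"
}
```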
diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh
index 2f7e3be972..833b06bd98 100755
--- a/scripts/exglobal_archive.sh
+++ b/scripts/exglobal_archive.sh
@@ -182,12 +182,12 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then
targrp_list="${targrp_list} gfswave"
fi
- if [ "${DO_OCN}" = "YES" ]; then
- targrp_list="${targrp_list} ocn_ice_grib2_0p5 ocn_ice_grib2_0p25 ocn_2D ocn_3D ocn_xsect ocn_daily gfs_flux_1p00"
+ if [[ "${DO_OCN}" == "YES" ]]; then
+ targrp_list="${targrp_list} ocean_6hravg ocean_daily ocean_grib2 gfs_flux_1p00"
fi
- if [ "${DO_ICE}" = "YES" ]; then
- targrp_list="${targrp_list} ice"
+ if [[ "${DO_ICE}" == "YES" ]]; then
+ targrp_list="${targrp_list} ice_6hravg ice_grib2"
fi
# Aerosols
diff --git a/scripts/exglobal_forecast.sh b/scripts/exglobal_forecast.sh
index eebc9e59c3..c07cde3004 100755
--- a/scripts/exglobal_forecast.sh
+++ b/scripts/exglobal_forecast.sh
@@ -105,9 +105,11 @@ common_predet
echo "MAIN: Loading variables before determination of run type"
FV3_predet
+[[ ${cplflx} = .true. ]] && CMEPS_predet
[[ ${cplflx} = .true. ]] && MOM6_predet
[[ ${cplwav} = .true. ]] && WW3_predet
[[ ${cplice} = .true. ]] && CICE_predet
+[[ ${cplchm} = .true. ]] && GOCART_predet
echo "MAIN: Variables before determination of run type loaded"
echo "MAIN: Determining run type"
@@ -119,6 +121,7 @@ echo "MAIN: RUN Type Determined"
echo "MAIN: Post-determination set up of run type"
FV3_postdet
+[[ ${cplflx} = .true. ]] && CMEPS_postdet
[[ ${cplflx} = .true. ]] && MOM6_postdet
[[ ${cplwav} = .true. ]] && WW3_postdet
[[ ${cplice} = .true. ]] && CICE_postdet
@@ -154,6 +157,7 @@ ${ERRSCRIPT} || exit "${err}"
FV3_out
[[ ${cplflx} = .true. ]] && MOM6_out
+[[ ${cplflx} = .true. ]] && CMEPS_out
[[ ${cplwav} = .true. ]] && WW3_out
[[ ${cplice} = .true. ]] && CICE_out
[[ ${cplchm} = .true. ]] && GOCART_out
diff --git a/scripts/exglobal_oceanice_products.py b/scripts/exglobal_oceanice_products.py
new file mode 100755
index 0000000000..0f8e2e0d6d
--- /dev/null
+++ b/scripts/exglobal_oceanice_products.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python3
+
+import os
+
+from wxflow import AttrDict, Logger, logit, cast_strdict_as_dtypedict
+from pygfs.task.oceanice_products import OceanIceProducts
+
+# initialize root logger
+logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
+
+
+@logit(logger)
+def main():
+
+ config = cast_strdict_as_dtypedict(os.environ)
+
+ # Instantiate the OceanIce object
+ oceanice = OceanIceProducts(config)
+
+ # Pull out all the configuration keys needed to run the rest of steps
+ keys = ['HOMEgfs', 'DATA', 'current_cycle', 'RUN', 'NET',
+ 'COM_OCEAN_HISTORY', 'COM_OCEAN_GRIB',
+ 'COM_ICE_HISTORY', 'COM_ICE_GRIB',
+ 'APRUN_OCNICEPOST',
+ 'component', 'forecast_hour', 'valid_datetime', 'avg_period',
+ 'model_grid', 'product_grids', 'oceanice_yaml']
+ oceanice_dict = AttrDict()
+ for key in keys:
+ oceanice_dict[key] = oceanice.task_config[key]
+
+ # Initialize the DATA/ directory; copy static data
+ oceanice.initialize(oceanice_dict)
+
+ for grid in oceanice_dict.product_grids:
+
+ logger.info(f"Processing {grid} grid")
+
+ # Configure DATA/ directory for execution; prepare namelist etc.
+ oceanice.configure(oceanice_dict, grid)
+
+ # Run the oceanice post executable to interpolate and create grib2 files
+ oceanice.execute(oceanice_dict, grid)
+
+ # Subset raw model data to create netCDF products
+ oceanice.subset(oceanice_dict)
+
+ # Copy processed output from execute and subset
+ oceanice.finalize(oceanice_dict)
+
+
+if __name__ == '__main__':
+ main()
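
The script reads its configuration from the environment via `cast_strdict_as_dtypedict`, so a standalone smoke test only needs the J-job's exports in place. A hedged sketch; the exact set of required variables is defined by the `OceanIceProducts` task in pygfs, and all values here are placeholders.

```bash
export HOMEgfs=/path/HOMEgfs DATA=/tmp/oceanice.$$ RUN=gfs NET=gfs
export PDY=20240101 cyc=00 COMPONENT=ocean FORECAST_HOUR=6   # names assumed from the J-job/rocoto wrappers
# ...plus the COM_* variables the J-job normally derives via generate_com
export PYTHONPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src:${PYTHONPATH:-}"
"${HOMEgfs}/scripts/exglobal_oceanice_products.py"
```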
diff --git a/scripts/run_reg2grb2.sh b/scripts/run_reg2grb2.sh
deleted file mode 100755
index ab2c80043e..0000000000
--- a/scripts/run_reg2grb2.sh
+++ /dev/null
@@ -1,72 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-#requires grib_util module
-
-MOM6REGRID=${MOM6REGRID:-${HOMEgfs}}
-export mask_file="${MOM6REGRID}/fix/reg2grb2/mask.0p25x0p25.grb2"
-
-# offline testing:
-#export DATA=
-#export icefile=$DATA/DATA0p5/icer2012010106.01.2012010100_0p5x0p5.nc
-#export ocnfile=$DATA/DATA0p5/ocnr2012010106.01.2012010100_0p5x0p5.nc
-#export outfile=$DATA/DATA0p5/out/ocnh2012010106.01.2012010100.grb2
-#
-# workflow testing:
-export icefile="icer${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25_CICE.nc"
-export ocnfile="ocnr${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25_MOM6.nc"
-export outfile="ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2"
-export outfile0p5="ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2"
-
-export mfcstcpl=${mfcstcpl:-1}
-export IGEN_OCNP=${IGEN_OCNP:-197}
-
-# PT This is the forecast date
-export year=${VDATE:0:4}
-export month=${VDATE:4:2}
-export day=${VDATE:6:2}
-export hour=${VDATE:8:2}
-
-# PT This is the initialization date
-export syear=${IDATE:0:4}
-export smonth=${IDATE:4:2}
-export sday=${IDATE:6:2}
-export shour=${IDATE:8:2}
-
-# PT Need to get this from above - could be 6 or 1 hour
-export hh_inc_ocn=6
-#
-# set for 1p0 lat-lon
-#export im=360
-#export jm=181
-# export km=40
-#export imo=360
-#export jmo=181
-#
-# set for 0p5 lat-lon
-#export im=720
-#export jm=361
-#export km=40
-#export imo=720
-#export jmo=361
-#
-# set for 0p25 lat-lon
-export im=1440
-export jm=721
-export imo=1440
-export jmo=721
-export km=40
-
-export flats=-90.
-export flatn=90.
-export flonw=0.0
-export flone=359.75
-
-ln -sf "${mask_file}" ./iceocnpost.g2
-${executable} > "reg2grb2.${VDATE}.${IDATE}.out"
-
-# interpolated from 0p25 to 0p5 grid
-grid2p05="0 6 0 0 0 0 0 0 720 361 0 0 90000000 0 48 -90000000 359500000 500000 500000 0"
-${COPYGB2} -g "${grid2p05}" -i0 -x "${outfile}" "${outfile0p5}"
-
diff --git a/scripts/run_regrid.sh b/scripts/run_regrid.sh
deleted file mode 100755
index 103e9a759e..0000000000
--- a/scripts/run_regrid.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#! /usr/bin/env bash
-
-source "${HOMEgfs}/ush/preamble.sh"
-
-MOM6REGRID="${MOM6REGRID:-${HOMEgfs}}"
-export EXEC_DIR="${MOM6REGRID}/exec"
-export USH_DIR="${MOM6REGRID}/ush"
-export COMOUTocean="${COM_OCEAN_HISTORY}"
-export COMOUTice="${COM_ICE_HISTORY}"
-export IDATE="${IDATE}"
-export VDATE="${VDATE}"
-export ENSMEM="${ENSMEM}"
-export FHR="${fhr}"
-export DATA="${DATA}"
-export FIXreg2grb2="${FIXreg2grb2}"
-
-###### DO NOT MODIFY BELOW UNLESS YOU KNOW WHAT YOU ARE DOING #######
-#Need NCL module to be loaded:
-echo "${NCARG_ROOT}"
-export NCL="${NCARG_ROOT}/bin/ncl"
-
-ls -alrt
-
-${NCL} "${USH_DIR}/icepost.ncl"
-${NCL} "${USH_DIR}/ocnpost.ncl"
-#####################################################################
-
diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd
index 1bdc3de6f3..4b7f6095d2 160000
--- a/sorc/gfs_utils.fd
+++ b/sorc/gfs_utils.fd
@@ -1 +1 @@
-Subproject commit 1bdc3de6f37825ad5872f52d3675795cb8fe3811
+Subproject commit 4b7f6095d260b7fcd9c99c337454e170f1aa7f2f
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index 6d5d40a354..21bc30faa4 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -107,7 +107,6 @@ for dir in aer \
lut \
mom6 \
orog \
- reg2grb2 \
sfc_climo \
ugwd \
verif \
@@ -135,16 +134,20 @@ for file in postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-G
postxconfig-NT-GFS-ANL.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt \
postxconfig-NT-GFS.txt postxconfig-NT-GFS-FLUX.txt postxconfig-NT-GFS-GOES.txt \
postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-TWO.txt \
- params_grib2_tbl_new post_tag_gfs128 post_tag_gfs65 nam_micro_lookup.dat
+ params_grib2_tbl_new post_tag_gfs128 post_tag_gfs65 nam_micro_lookup.dat
do
${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/parm/${file}" .
done
for file in optics_luts_DUST.dat optics_luts_DUST_nasa.dat optics_luts_NITR_nasa.dat \
optics_luts_SALT.dat optics_luts_SALT_nasa.dat optics_luts_SOOT.dat optics_luts_SOOT_nasa.dat \
- optics_luts_SUSO.dat optics_luts_SUSO_nasa.dat optics_luts_WASO.dat optics_luts_WASO_nasa.dat
+ optics_luts_SUSO.dat optics_luts_SUSO_nasa.dat optics_luts_WASO.dat optics_luts_WASO_nasa.dat
do
${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/fix/chem/${file}" .
done
+for file in ice.csv ocean.csv ocnicepost.nml.jinja2
+do
+ ${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/parm/ocnicepost/${file}" .
+done
cd "${HOMEgfs}/scripts" || exit 8
${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/scripts/exemcsfc_global_sfc_prep.sh" .
@@ -152,7 +155,7 @@ cd "${HOMEgfs}/ush" || exit 8
for file in emcsfc_ice_blend.sh global_cycle_driver.sh emcsfc_snow.sh global_cycle.sh; do
${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/ush/${file}" .
done
-for file in finddate.sh make_ntc_bull.pl make_NTC_file.pl make_tif.sh month_name.sh ; do
+for file in make_ntc_bull.pl make_NTC_file.pl make_tif.sh month_name.sh ; do
${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/ush/${file}" .
done
@@ -243,7 +246,7 @@ cd "${HOMEgfs}/exec" || exit 1
for utilexe in fbwndgfs.x gaussian_sfcanl.x gfs_bufr.x supvit.x syndat_getjtbul.x \
syndat_maksynrc.x syndat_qctropcy.x tocsbufr.x overgridid.x \
- mkgfsawps.x enkf_chgres_recenter_nc.x tave.x vint.x reg2grb2.x
+ mkgfsawps.x enkf_chgres_recenter_nc.x tave.x vint.x ocnicepost.x
do
[[ -s "${utilexe}" ]] && rm -f "${utilexe}"
${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/install/bin/${utilexe}" .
@@ -397,7 +400,6 @@ for prog in enkf_chgres_recenter_nc.fd \
mkgfsawps.fd \
overgridid.fd \
rdbfmsua.fd \
- reg2grb2.fd \
supvit.fd \
syndat_getjtbul.fd \
syndat_maksynrc.fd \
@@ -405,7 +407,8 @@ for prog in enkf_chgres_recenter_nc.fd \
tave.fd \
tocsbufr.fd \
vint.fd \
- webtitle.fd
+ webtitle.fd \
+ ocnicepost.fd
do
if [[ -d "${prog}" ]]; then rm -rf "${prog}"; fi
${LINK_OR_COPY} "gfs_utils.fd/src/${prog}" .
diff --git a/sorc/ncl.setup b/sorc/ncl.setup
deleted file mode 100644
index b4981689db..0000000000
--- a/sorc/ncl.setup
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-set +x
-case ${target} in
- 'jet'|'hera')
- module load ncl/6.5.0
- export NCARG_LIB=${NCARG_ROOT}/lib
- ;;
- *)
- echo "[${BASH_SOURCE[0]}]: unknown ${target}"
- ;;
-esac
diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh
index e1a2a49a7e..198df6505f 100755
--- a/ush/forecast_det.sh
+++ b/ush/forecast_det.sh
@@ -8,7 +8,7 @@
## This script is a definition of functions.
#####
-# For all non-evironment variables
+# For all non-environment variables
# Cycling and forecast hour specific parameters
FV3_det(){
diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh
index 8e40d6c881..052e549251 100755
--- a/ush/forecast_postdet.sh
+++ b/ush/forecast_postdet.sh
@@ -266,10 +266,10 @@ EOF
# inline post fix files
if [[ ${WRITE_DOPOST} = ".true." ]]; then
- ${NLN} "${PARM_POST}/post_tag_gfs${LEVS}" "${DATA}/itag"
- ${NLN} "${FLTFILEGFS:-${PARM_POST}/postxconfig-NT-GFS-TWO.txt}" "${DATA}/postxconfig-NT.txt"
- ${NLN} "${FLTFILEGFSF00:-${PARM_POST}/postxconfig-NT-GFS-F00-TWO.txt}" "${DATA}/postxconfig-NT_FH00.txt"
- ${NLN} "${POSTGRB2TBL:-${PARM_POST}/params_grib2_tbl_new}" "${DATA}/params_grib2_tbl_new"
+ ${NLN} "${PARMgfs}/post/post_tag_gfs${LEVS}" "${DATA}/itag"
+ ${NLN} "${FLTFILEGFS:-${PARMgfs}/post/postxconfig-NT-GFS-TWO.txt}" "${DATA}/postxconfig-NT.txt"
+ ${NLN} "${FLTFILEGFSF00:-${PARMgfs}/post/postxconfig-NT-GFS-F00-TWO.txt}" "${DATA}/postxconfig-NT_FH00.txt"
+ ${NLN} "${POSTGRB2TBL:-${PARMgfs}/post/params_grib2_tbl_new}" "${DATA}/params_grib2_tbl_new"
fi
#------------------------------------------------------------------
@@ -463,8 +463,6 @@ EOF
LONB_STP=${LONB_STP:-${LONB_CASE}}
LATB_STP=${LATB_STP:-${LATB_CASE}}
cd "${DATA}" || exit 1
- if [[ ! -d ${COM_ATMOS_HISTORY} ]]; then mkdir -p "${COM_ATMOS_HISTORY}"; fi
- if [[ ! -d ${COM_ATMOS_MASTER} ]]; then mkdir -p "${COM_ATMOS_MASTER}"; fi
if [[ "${QUILTING}" = ".true." ]] && [[ "${OUTPUT_GRID}" = "gaussian_grid" ]]; then
for fhr in ${FV3_OUTPUT_FH}; do
local FH3=$(printf %03i "${fhr}")
@@ -503,7 +501,6 @@ FV3_out() {
# Copy FV3 restart files
if [[ ${RUN} =~ "gdas" ]]; then
cd "${DATA}/RESTART"
- mkdir -p "${COM_ATMOS_RESTART}"
local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
while [[ ${idate} -le ${forecast_end_cycle} ]]; do
for file in "${idate:0:8}.${idate:8:2}0000."*; do
@@ -604,8 +601,6 @@ WW3_postdet() {
${NLN} "${wavcurfile}" "${DATA}/current.${WAVECUR_FID}"
fi
- if [[ ! -d ${COM_WAVE_HISTORY} ]]; then mkdir -p "${COM_WAVE_HISTORY}"; fi
-
# Link output files
cd "${DATA}"
if [[ ${waveMULTIGRID} = ".true." ]]; then
@@ -682,6 +677,7 @@ MOM6_postdet() {
${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc"
case ${OCNRES} in
"025")
+ local nn
for nn in $(seq 1 4); do
if [[ -f "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" ]]; then
${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc"
@@ -700,7 +696,7 @@ MOM6_postdet() {
fi
# Copy MOM6 fixed files
- ${NCP} "${FIXmom}/${OCNRES}/"* "${DATA}/INPUT/"
+ ${NCP} "${FIXmom}/${OCNRES}/"* "${DATA}/INPUT/" # TODO: These need to be explicit
# Copy coupled grid_spec
spec_file="${FIXcpl}/a${CASE}o${OCNRES}/grid_spec.nc"
@@ -711,27 +707,6 @@ MOM6_postdet() {
exit 3
fi
- # Copy mediator restart files to RUNDIR # TODO: mediator should have its own CMEPS_postdet() function
- if [[ ${warm_start} = ".true." ]]; then
- local mediator_file="${COM_MED_RESTART}/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc"
- if [[ -f "${mediator_file}" ]]; then
- ${NCP} "${mediator_file}" "${DATA}/ufs.cpld.cpl.r.nc"
- rm -f "${DATA}/rpointer.cpl"
- touch "${DATA}/rpointer.cpl"
- echo "ufs.cpld.cpl.r.nc" >> "${DATA}/rpointer.cpl"
- else
- # We have a choice to make here.
- # Either we can FATAL ERROR out, or we can let the coupling fields initialize from zero
- # cmeps_run_type is determined based on the availability of the mediator restart file
- echo "WARNING: ${mediator_file} does not exist for warm_start = .true., initializing!"
- #echo "FATAL ERROR: ${mediator_file} must exist for warm_start = .true. and does not, ABORT!"
- #exit 4
- fi
- else
- # This is a cold start, so initialize the coupling fields from zero
- export cmeps_run_type="startup"
- fi
-
# If using stochatic parameterizations, create a seed that does not exceed the
# largest signed integer
if [[ "${DO_OCN_SPPT}" = "YES" ]] || [[ "${DO_OCN_PERT_EPBL}" = "YES" ]]; then
@@ -743,58 +718,53 @@ MOM6_postdet() {
fi
fi
- # Create COMOUTocean
- [[ ! -d ${COM_OCEAN_HISTORY} ]] && mkdir -p "${COM_OCEAN_HISTORY}"
-
# Link output files
if [[ "${RUN}" =~ "gfs" || "${RUN}" =~ "gefs" ]]; then
- # Link output files for RUN = gfs
-
- # TODO: get requirements on what files need to be written out and what these dates here are and what they mean
+ # Link output files for RUN = gfs|gefs
- if [[ ! -d ${COM_OCEAN_HISTORY} ]]; then mkdir -p "${COM_OCEAN_HISTORY}"; fi
+ # Looping over MOM6 output hours
+ local fhr fhr3 last_fhr interval midpoint vdate vdate_mid source_file dest_file
+ for fhr in ${MOM6_OUTPUT_FH}; do
+ fhr3=$(printf %03i "${fhr}")
- # Looping over FV3 output hours
- # TODO: Need to define MOM6_OUTPUT_FH and control at some point for issue #1629
- for fhr in ${FV3_OUTPUT_FH}; do
if [[ -z ${last_fhr:-} ]]; then
- local last_fhr=${fhr}
+ last_fhr=${fhr}
continue
fi
+
(( interval = fhr - last_fhr ))
(( midpoint = last_fhr + interval/2 ))
- local vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
- local vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H)
-
+ vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
+ vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H)
# Native model output uses window midpoint in the filename, but we are mapping that to the end of the period for COM
- local source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
- local dest_file="ocn${vdate}.${ENSMEM}.${current_cycle}.nc"
+ source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
+ dest_file="${RUN}.ocean.t${cyc}z.${interval}hr_avg.f${fhr3}.nc"
${NLN} "${COM_OCEAN_HISTORY}/${dest_file}" "${DATA}/${source_file}"
- local source_file="ocn_daily_${vdate:0:4}_${vdate:4:2}_${vdate:6:2}.nc"
- local dest_file=${source_file}
- if [[ ! -a "${DATA}/${source_file}" ]]; then
+ # Daily output
+      if (( fhr > 0 && fhr % 24 == 0 )); then
+ source_file="ocn_daily_${vdate:0:4}_${vdate:4:2}_${vdate:6:2}.nc"
+ dest_file="${RUN}.ocean.t${cyc}z.daily.f${fhr3}.nc"
${NLN} "${COM_OCEAN_HISTORY}/${dest_file}" "${DATA}/${source_file}"
fi
- local last_fhr=${fhr}
+ last_fhr=${fhr}
+
done
elif [[ "${RUN}" =~ "gdas" ]]; then
# Link output files for RUN = gdas
- # Save MOM6 backgrounds
- for fhr in ${FV3_OUTPUT_FH}; do
- local idatestr=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y_%m_%d_%H)
+ # Save (instantaneous) MOM6 backgrounds
+ for fhr in ${MOM6_OUTPUT_FH}; do
local fhr3=$(printf %03i "${fhr}")
- ${NLN} "${COM_OCEAN_HISTORY}/${RUN}.t${cyc}z.ocnf${fhr3}.nc" "${DATA}/ocn_da_${idatestr}.nc"
+ local vdatestr=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y_%m_%d_%H)
+ ${NLN} "${COM_OCEAN_HISTORY}/${RUN}.ocean.t${cyc}z.inst.f${fhr3}.nc" "${DATA}/ocn_da_${vdatestr}.nc"
done
fi
- mkdir -p "${COM_OCEAN_RESTART}"
-
# Link ocean restarts from DATA to COM
# Coarser than 1/2 degree has a single MOM restart
${NLN} "${COM_OCEAN_RESTART}/${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/"
@@ -809,10 +779,16 @@ MOM6_postdet() {
;;
esac
- # Loop over restart_interval frequency and link restarts from DATA to COM
- local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- while [[ ${idate} -lt ${forecast_end_cycle} ]]; do
- local idatestr=$(date +%Y-%m-%d-%H -d "${idate:0:8} ${idate:8:2}")
+ if [[ "${RUN}" =~ "gdas" ]]; then
+ local interval idate
+ if [[ "${DOIAU}" = "YES" ]]; then
+ # Link restarts at the beginning of the next cycle from DATA to COM
+ interval=$(( assim_freq / 2 ))
+ idate=$(date --utc -d "${next_cycle:0:8} ${next_cycle:8:2} - ${interval} hours" +%Y%m%d%H)
+ else
+ # Link restarts at the middle of the next cycle from DATA to COM
+ idate="${next_cycle}"
+ fi
${NLN} "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/"
case ${OCNRES} in
"025")
@@ -821,23 +797,7 @@ MOM6_postdet() {
done
;;
esac
- local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- done
-
- # TODO: mediator should have its own CMEPS_postdet() function
- # Link mediator restarts from DATA to COM
- # DANGER DANGER DANGER - Linking mediator restarts to COM causes the model to fail with a message like this below:
- # Abort with message NetCDF: File exists && NC_NOCLOBBER in file pio-2.5.7/src/clib/pioc_support.c at line 2173
- # Instead of linking, copy the mediator files after the model finishes
- #local COMOUTmed="${ROTDIR}/${RUN}.${PDY}/${cyc}/med"
- #mkdir -p "${COMOUTmed}/RESTART"
- #local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- #while [[ ${idate} -le ${forecast_end_cycle} ]]; do
- # local seconds=$(to_seconds ${idate:8:2}0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds
- # local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}"
- # ${NLN} "${COMOUTmed}/RESTART/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc" "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc"
- # local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- #done
+ fi
echo "SUB ${FUNCNAME[0]}: MOM6 input data linked/copied"
@@ -853,26 +813,8 @@ MOM6_out() {
echo "SUB ${FUNCNAME[0]}: Copying output data for MOM6"
  # Copy MOM_input from DATA to COM_CONF after the forecast is run (and successful)
- if [[ ! -d ${COM_OCEAN_INPUT} ]]; then mkdir -p "${COM_OCEAN_INPUT}"; fi
${NCP} "${DATA}/INPUT/MOM_input" "${COM_CONF}/ufs.MOM_input"
- # TODO: mediator should have its own CMEPS_out() function
- # Copy mediator restarts from DATA to COM
- # Linking mediator restarts to COM causes the model to fail with a message.
- # See MOM6_postdet() function for error message
- mkdir -p "${COM_MED_RESTART}"
- local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- while [[ ${idate} -le ${forecast_end_cycle} ]]; do
- local seconds=$(to_seconds "${idate:8:2}"0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds
- local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}"
- local mediator_file="${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc"
- if [[ -f ${mediator_file} ]]; then
- ${NCP} "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" "${COM_MED_RESTART}/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc"
- else
- echo "Mediator restart ${mediator_file} not found."
- fi
- local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H)
- done
}
CICE_postdet() {
@@ -895,54 +837,40 @@ CICE_postdet() {
${NLN} "${FIXcice}/${ICERES}/${CICE_MASK}" "${DATA}/"
${NLN} "${FIXcice}/${ICERES}/${MESH_ICE}" "${DATA}/"
- # Link CICE output files
- if [[ ! -d "${COM_ICE_HISTORY}" ]]; then mkdir -p "${COM_ICE_HISTORY}"; fi
- mkdir -p "${COM_ICE_RESTART}"
+ # Link iceh_ic file to COM. This is the initial condition file from CICE (f000)
+ # TODO: Is this file needed in COM? Is this going to be used for generating any products?
+ local vdate seconds vdatestr fhr fhr3 interval last_fhr
+ seconds=$(to_seconds "${current_cycle:8:2}0000") # convert HHMMSS to seconds
+ vdatestr="${current_cycle:0:4}-${current_cycle:4:2}-${current_cycle:6:2}-${seconds}"
+ ${NLN} "${COM_ICE_HISTORY}/${RUN}.ice.t${cyc}z.ic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc"
- if [[ "${RUN}" =~ "gfs" || "${RUN}" =~ "gefs" ]]; then
- # Link output files for RUN = gfs
-
- # TODO: make these forecast output files consistent w/ GFS output
- # TODO: Work w/ NB to determine appropriate naming convention for these files
-
- # TODO: consult w/ NB on how to improve on this. Gather requirements and more information on what these files are and how they are used to properly catalog them
- local vdate seconds vdatestr fhr last_fhr
- for fhr in ${FV3_OUTPUT_FH}; do
- vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
- seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds
- vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}"
+ # Link CICE forecast output files from DATA/CICE_OUTPUT to COM
+ local source_file dest_file
+ for fhr in ${CICE_OUTPUT_FH}; do
+ fhr3=$(printf %03i "${fhr}")
- if [[ 10#${fhr} -eq 0 ]]; then
- ${NLN} "${COM_ICE_HISTORY}/iceic${vdate}.${ENSMEM}.${current_cycle}.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc"
- else
- (( interval = fhr - last_fhr )) # Umm.. isn't this CICE_HISTFREQ_N in hours (currently set to FHOUT)?
- ${NLN} "${COM_ICE_HISTORY}/ice${vdate}.${ENSMEM}.${current_cycle}.nc" "${DATA}/CICE_OUTPUT/iceh_$(printf "%0.2d" "${interval}")h.${vdatestr}.nc"
- fi
+ if [[ -z ${last_fhr:-} ]]; then
last_fhr=${fhr}
- done
+ continue
+ fi
- elif [[ "${RUN}" =~ "gdas" ]]; then
+ (( interval = fhr - last_fhr ))
- # Link CICE generated initial condition file from DATA/CICE_OUTPUT to COMOUTice
- # This can be thought of as the f000 output from the CICE model
- local seconds vdatestr
- seconds=$(to_seconds "${current_cycle:8:2}0000") # convert HHMMSS to seconds
- vdatestr="${current_cycle:0:4}-${current_cycle:4:2}-${current_cycle:6:2}-${seconds}"
- ${NLN} "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.iceic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc"
-
- # Link instantaneous CICE forecast output files from DATA/CICE_OUTPUT to COMOUTice
- local vdate vdatestr seconds fhr fhr3
- fhr="${FHOUT}"
- while [[ "${fhr}" -le "${FHMAX}" ]]; do
- vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
- seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds
- vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}"
- fhr3=$(printf %03i "${fhr}")
- ${NLN} "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.icef${fhr3}.nc" "${DATA}/CICE_OUTPUT/iceh_inst.${vdatestr}.nc"
- fhr=$((fhr + FHOUT))
- done
+ vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
+ seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds
+ vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}"
- fi
+ if [[ "${RUN}" =~ "gfs" || "${RUN}" =~ "gefs" ]]; then
+ source_file="iceh_$(printf "%0.2d" "${interval}")h.${vdatestr}.nc"
+ dest_file="${RUN}.ice.t${cyc}z.${interval}hr_avg.f${fhr3}.nc"
+ elif [[ "${RUN}" =~ "gdas" ]]; then
+ source_file="iceh_inst.${vdatestr}.nc"
+ dest_file="${RUN}.ice.t${cyc}z.inst.f${fhr3}.nc"
+ fi
+ ${NLN} "${COM_ICE_HISTORY}/${dest_file}" "${DATA}/CICE_OUTPUT/${source_file}"
+
+ last_fhr=${fhr}
+ done
# Link CICE restarts from CICE_RESTART to COMOUTice/RESTART
# Loop over restart_interval and link restarts from DATA to COM
@@ -966,7 +894,6 @@ CICE_out() {
echo "SUB ${FUNCNAME[0]}: Copying output data for CICE"
  # Copy ice_in namelist from DATA to COM_CONF after the forecast is run (and successful)
- if [[ ! -d "${COM_ICE_INPUT}" ]]; then mkdir -p "${COM_ICE_INPUT}"; fi
${NCP} "${DATA}/ice_in" "${COM_CONF}/ufs.ice_in"
}
@@ -1004,8 +931,6 @@ GOCART_rc() {
GOCART_postdet() {
echo "SUB ${FUNCNAME[0]}: Linking output data for GOCART"
- if [[ ! -d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi
-
for fhr in ${FV3_OUTPUT_FH}; do
local vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
@@ -1033,6 +958,56 @@ GOCART_out() {
${NCP} "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \
"${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4"
done
+}
+
+CMEPS_postdet() {
+ echo "SUB ${FUNCNAME[0]}: Linking output data for CMEPS mediator"
+
+ # Copy mediator restart files to RUNDIR
+ if [[ "${warm_start}" = ".true." ]]; then
+ local mediator_file="${COM_MED_RESTART}/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc"
+ if [[ -f "${mediator_file}" ]]; then
+ ${NCP} "${mediator_file}" "${DATA}/ufs.cpld.cpl.r.nc"
+ rm -f "${DATA}/rpointer.cpl"
+ touch "${DATA}/rpointer.cpl"
+ echo "ufs.cpld.cpl.r.nc" >> "${DATA}/rpointer.cpl"
+ else
+ # We have a choice to make here.
+ # Either we can FATAL ERROR out, or we can let the coupling fields initialize from zero
+ # cmeps_run_type is determined based on the availability of the mediator restart file
+ echo "WARNING: ${mediator_file} does not exist for warm_start = .true., initializing!"
+ #echo "FATAL ERROR: ${mediator_file} must exist for warm_start = .true. and does not, ABORT!"
+ #exit 4
+ fi
+ fi
+ # Link mediator restarts from DATA to COM
+ # DANGER DANGER DANGER - Linking mediator restarts to COM causes the model to fail with a message like this below:
+ # Abort with message NetCDF: File exists && NC_NOCLOBBER in file pio-2.5.7/src/clib/pioc_support.c at line 2173
+ # Instead of linking, copy the mediator files after the model finishes. See CMEPS_out() below.
+ #local rdate rdatestr seconds mediator_file
+ #rdate=${forecast_end_cycle}
+ #seconds=$(to_seconds "${rdate:8:2}"0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds
+ #rdatestr="${rdate:0:4}-${rdate:4:2}-${rdate:6:2}-${seconds}"
+ #${NLN} "${COM_MED_RESTART}/${rdate:0:8}.${rdate:8:2}0000.ufs.cpld.cpl.r.nc" "${DATA}/CMEPS_RESTART/ufs.cpld.cpl.r.${rdatestr}.nc"
+
+}
+
+CMEPS_out() {
+ echo "SUB ${FUNCNAME[0]}: Copying output data for CMEPS mediator"
+
+  # Linking mediator restarts to COM causes the model to fail with a message like:
+ # Abort with message NetCDF: File exists && NC_NOCLOBBER in file pio-2.5.7/src/clib/pioc_support.c at line 2173
+ # Copy mediator restarts from DATA to COM
+ local rdate rdatestr seconds mediator_file
+ rdate=${forecast_end_cycle}
+ seconds=$(to_seconds "${rdate:8:2}"0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds
+ rdatestr="${rdate:0:4}-${rdate:4:2}-${rdate:6:2}-${seconds}"
+ mediator_file="${DATA}/CMEPS_RESTART/ufs.cpld.cpl.r.${rdatestr}.nc"
+ if [[ -f ${mediator_file} ]]; then
+ ${NCP} "${mediator_file}" "${COM_MED_RESTART}/${rdate:0:8}.${rdate:8:2}0000.ufs.cpld.cpl.r.nc"
+ else
+ echo "Mediator restart ${mediator_file} not found."
+ fi
}
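
As a worked example of the new MOM6 history mapping above (dates assumed): with RUN=gfs, cyc=00, current_cycle=2021032200 and MOM6_OUTPUT_FH beginning "0 6 12 ...", the fhr=6 iteration gives interval=6 and midpoint=3, so

    vdate=2021032206  vdate_mid=2021032203
    source_file="ocn_2021_03_22_03.nc"            # native name uses the window midpoint
    dest_file="gfs.ocean.t00z.6hr_avg.f006.nc"    # COM name uses the end of the period

and ${NLN} pre-creates "${DATA}/${source_file}" as a link to "${COM_OCEAN_HISTORY}/${dest_file}" before the model writes it.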
diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh
index 8f46ed6ea0..1aaa1a4b9d 100755
--- a/ush/forecast_predet.sh
+++ b/ush/forecast_predet.sh
@@ -8,9 +8,6 @@
## This script is a definition of functions.
#####
-# For all non-evironment variables
-# Cycling and forecast hour specific parameters
-
to_seconds() {
# Function to convert HHMMSS to seconds since 00Z
local hhmmss=${1:?}
@@ -50,21 +47,12 @@ common_predet(){
# shellcheck disable=SC2034
pwd=$(pwd)
CDUMP=${CDUMP:-gdas}
- CASE=${CASE:-C768}
- CDATE=${CDATE:-2017032500}
+ CASE=${CASE:-C96}
+ CDATE=${CDATE:-"${PDY}${cyc}"}
ENSMEM=${ENSMEM:-000}
- FCSTEXECDIR=${FCSTEXECDIR:-${HOMEgfs}/exec}
- FCSTEXEC=${FCSTEXEC:-ufs_model.x}
-
- # Directories.
- FIXgfs=${FIXgfs:-${HOMEgfs}/fix}
-
- # Model specific stuff
- PARM_POST=${PARM_POST:-${HOMEgfs}/parm/post}
-
# Define significant cycles
- current_cycle=${CDATE}
+ current_cycle="${PDY}${cyc}"
previous_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${assim_freq} hours" +%Y%m%d%H)
# ignore errors that variable isn't used
# shellcheck disable=SC2034
@@ -88,23 +76,28 @@ common_predet(){
tcyc=${scyc}
fi
- mkdir -p "${COM_CONF}"
+ FHMIN=${FHMIN:-0}
+ FHMAX=${FHMAX:-9}
+ FHOUT=${FHOUT:-3}
+ FHMAX_HF=${FHMAX_HF:-0}
+ FHOUT_HF=${FHOUT_HF:-1}
+
+ # Several model components share DATA/INPUT for input data
+ if [[ ! -d "${DATA}/INPUT" ]]; then mkdir -p "${DATA}/INPUT"; fi
+
+ if [[ ! -d "${COM_CONF}" ]]; then mkdir -p "${COM_CONF}"; fi
cd "${DATA}" || ( echo "FATAL ERROR: Unable to 'cd ${DATA}', ABORT!"; exit 8 )
}
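
For example (values assumed): with PDY=20131001, cyc=00 and assim_freq=6, the cycle definitions above evaluate to

    current_cycle=2013100100
    previous_cycle=$(date --utc -d "20131001 00 - 6 hours" +%Y%m%d%H)   # -> 2013093018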
FV3_predet(){
echo "SUB ${FUNCNAME[0]}: Defining variables for FV3"
- FHMIN=${FHMIN:-0}
- FHMAX=${FHMAX:-9}
- FHOUT=${FHOUT:-3}
+
+ if [[ ! -d "${COM_ATMOS_HISTORY}" ]]; then mkdir -p "${COM_ATMOS_HISTORY}"; fi
+ if [[ ! -d "${COM_ATMOS_MASTER}" ]]; then mkdir -p "${COM_ATMOS_MASTER}"; fi
+ if [[ ! -d "${COM_ATMOS_RESTART}" ]]; then mkdir -p "${COM_ATMOS_RESTART}"; fi
+
FHZER=${FHZER:-6}
FHCYC=${FHCYC:-24}
- FHMAX_HF=${FHMAX_HF:-0}
- FHOUT_HF=${FHOUT_HF:-1}
- NSOUT=${NSOUT:-"-1"}
- FDIAG=${FHOUT}
- if (( FHMAX_HF > 0 && FHOUT_HF > 0 )); then FDIAG=${FHOUT_HF}; fi
- WRITE_DOPOST=${WRITE_DOPOST:-".false."}
restart_interval=${restart_interval:-${FHMAX}}
# restart_interval = 0 implies write restart at the END of the forecast i.e. at FHMAX
if [[ ${restart_interval} -eq 0 ]]; then
@@ -112,8 +105,8 @@ FV3_predet(){
fi
# Convert output settings into an explicit list for FV3
- # NOTE: FV3_OUTPUT_FH is also currently used in other components
- # TODO: Have a seperate control for other components to address issue #1629
+ # Ignore "not used" warning
+ # shellcheck disable=SC2034
FV3_OUTPUT_FH=""
local fhr=${FHMIN}
if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then
@@ -122,12 +115,6 @@ FV3_predet(){
fi
FV3_OUTPUT_FH="${FV3_OUTPUT_FH} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")"
- # Model resolution specific parameters
- DELTIM=${DELTIM:-225}
- layout_x=${layout_x:-8}
- layout_y=${layout_y:-16}
- LEVS=${LEVS:-65}
-
# Other options
# ignore errors that variable isn't used
# shellcheck disable=SC2034
@@ -141,18 +128,8 @@ FV3_predet(){
# Model config options
ntiles=6
- TYPE=${TYPE:-"nh"} # choices: nh, hydro
- MONO=${MONO:-"non-mono"} # choices: mono, non-mono
-
- QUILTING=${QUILTING:-".true."}
- OUTPUT_GRID=${OUTPUT_GRID:-"gaussian_grid"}
- WRITE_NEMSIOFLIP=${WRITE_NEMSIOFLIP:-".true."}
- WRITE_FSYNCFLAG=${WRITE_FSYNCFLAG:-".true."}
-
rCDUMP=${rCDUMP:-${CDUMP}}
- mkdir -p "${DATA}/INPUT"
-
#------------------------------------------------------------------
# changeable parameters
# dycore definitions
@@ -210,8 +187,7 @@ FV3_predet(){
print_freq=${print_freq:-6}
#-------------------------------------------------------
- if [[ ${RUN} =~ "gfs" || ${RUN} = "gefs" ]]; then
- if [[ ! -d ${COM_ATMOS_RESTART} ]]; then mkdir -p "${COM_ATMOS_RESTART}" ; fi
+ if [[ "${RUN}" =~ "gfs" || "${RUN}" = "gefs" ]]; then
${NLN} "${COM_ATMOS_RESTART}" RESTART
# The final restart written at the end doesn't include the valid date
# Create links that keep the same name pattern for these files
@@ -225,26 +201,68 @@ FV3_predet(){
${NLN} "${file}" "${COM_ATMOS_RESTART}/${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${file}"
done
else
- mkdir -p "${DATA}/RESTART"
+ if [[ ! -d "${DATA}/RESTART" ]]; then mkdir -p "${DATA}/RESTART"; fi
fi
- echo "SUB ${FUNCNAME[0]}: pre-determination variables set"
}
WW3_predet(){
echo "SUB ${FUNCNAME[0]}: WW3 before run type determination"
+
+ if [[ ! -d "${COM_WAVE_HISTORY}" ]]; then mkdir -p "${COM_WAVE_HISTORY}"; fi
if [[ ! -d "${COM_WAVE_RESTART}" ]]; then mkdir -p "${COM_WAVE_RESTART}" ; fi
+
${NLN} "${COM_WAVE_RESTART}" "restart_wave"
}
CICE_predet(){
echo "SUB ${FUNCNAME[0]}: CICE before run type determination"
+
+ if [[ ! -d "${COM_ICE_HISTORY}" ]]; then mkdir -p "${COM_ICE_HISTORY}"; fi
+ if [[ ! -d "${COM_ICE_RESTART}" ]]; then mkdir -p "${COM_ICE_RESTART}"; fi
+ if [[ ! -d "${COM_ICE_INPUT}" ]]; then mkdir -p "${COM_ICE_INPUT}"; fi
+
if [[ ! -d "${DATA}/CICE_OUTPUT" ]]; then mkdir -p "${DATA}/CICE_OUTPUT"; fi
if [[ ! -d "${DATA}/CICE_RESTART" ]]; then mkdir -p "${DATA}/CICE_RESTART"; fi
+
+ # CICE does not have a concept of high frequency output like FV3
+ # Convert output settings into an explicit list for CICE
+ # Ignore "not used" warning
+ # shellcheck disable=SC2034
+ CICE_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "${FHOUT}" "${FHMAX}")
+
}
MOM6_predet(){
echo "SUB ${FUNCNAME[0]}: MOM6 before run type determination"
+
+ if [[ ! -d "${COM_OCEAN_HISTORY}" ]]; then mkdir -p "${COM_OCEAN_HISTORY}"; fi
+ if [[ ! -d "${COM_OCEAN_RESTART}" ]]; then mkdir -p "${COM_OCEAN_RESTART}"; fi
+ if [[ ! -d "${COM_OCEAN_INPUT}" ]]; then mkdir -p "${COM_OCEAN_INPUT}"; fi
+
if [[ ! -d "${DATA}/MOM6_OUTPUT" ]]; then mkdir -p "${DATA}/MOM6_OUTPUT"; fi
if [[ ! -d "${DATA}/MOM6_RESTART" ]]; then mkdir -p "${DATA}/MOM6_RESTART"; fi
+
+ # MOM6 does not have a concept of high frequency output like FV3
+ # Convert output settings into an explicit list for MOM6
+ # Ignore "not used" warning
+ # shellcheck disable=SC2034
+ MOM6_OUTPUT_FH=$(seq -s ' ' "${FHMIN}" "${FHOUT}" "${FHMAX}")
+
+}
+
+CMEPS_predet(){
+ echo "SUB ${FUNCNAME[0]}: CMEPS before run type determination"
+
+ if [[ ! -d "${COM_MED_RESTART}" ]]; then mkdir -p "${COM_MED_RESTART}"; fi
+
+ if [[ ! -d "${DATA}/CMEPS_RESTART" ]]; then mkdir -p "${DATA}/CMEPS_RESTART"; fi
+
+}
+
+GOCART_predet(){
+ echo "SUB ${FUNCNAME[0]}: GOCART before run type determination"
+
+ if [[ ! -d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi
+
}
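
To make the new explicit output lists concrete (values assumed): with FHMIN=0, FHOUT=6 and FHMAX=24, the seq calls in CICE_predet and MOM6_predet expand as

    $ seq -s ' ' 0 6 24
    0 6 12 18 24

so CICE_OUTPUT_FH and MOM6_OUTPUT_FH each hold the space-separated list "0 6 12 18 24" that the postdet linking loops iterate over.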
diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh
index f1beb9469d..903c2d63fb 100755
--- a/ush/hpssarch_gen.sh
+++ b/ush/hpssarch_gen.sh
@@ -251,48 +251,64 @@ if [[ ${type} = "gfs" ]]; then
} >> "${DATA}/gfswave.txt"
fi
- if [[ ${DO_OCN} = "YES" ]]; then
+ if [[ "${DO_OCN}" == "YES" ]]; then
- head="gfs.t${cyc}z."
+ head="gfs.ocean.t${cyc}z."
+ rm -f "${DATA}/ocean_6hravg.txt"; touch "${DATA}/ocean_6hravg.txt"
+ rm -f "${DATA}/ocean_daily.txt"; touch "${DATA}/ocean_daily.txt"
+ rm -f "${DATA}/ocean_grib2.txt"; touch "${DATA}/ocean_grib2.txt"
- rm -f "${DATA}/gfs_flux_1p00.txt"
- rm -f "${DATA}/ocn_ice_grib2_0p5.txt"
- rm -f "${DATA}/ocn_ice_grib2_0p25.txt"
- rm -f "${DATA}/ocn_2D.txt"
- rm -f "${DATA}/ocn_3D.txt"
- rm -f "${DATA}/ocn_xsect.txt"
- rm -f "${DATA}/ocn_daily.txt"
- touch "${DATA}/gfs_flux_1p00.txt"
- touch "${DATA}/ocn_ice_grib2_0p5.txt"
- touch "${DATA}/ocn_ice_grib2_0p25.txt"
- touch "${DATA}/ocn_2D.txt"
- touch "${DATA}/ocn_3D.txt"
- touch "${DATA}/ocn_xsect.txt"
- touch "${DATA}/ocn_daily.txt"
- echo "${COM_OCEAN_INPUT/${ROTDIR}\//}/MOM_input" >> "${DATA}/ocn_2D.txt"
- echo "${COM_OCEAN_2D/${ROTDIR}\//}/ocn_2D*" >> "${DATA}/ocn_2D.txt"
- echo "${COM_OCEAN_3D/${ROTDIR}\//}/ocn_3D*" >> "${DATA}/ocn_3D.txt"
- echo "${COM_OCEAN_XSECT/${ROTDIR}\//}/ocn*EQ*" >> "${DATA}/ocn_xsect.txt"
- echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/ocn_daily*" >> "${DATA}/ocn_daily.txt"
- echo "${COM_OCEAN_GRIB_0p50/${ROTDIR}\//}/ocn_ice*0p5x0p5.grb2" >> "${DATA}/ocn_ice_grib2_0p5.txt"
- echo "${COM_OCEAN_GRIB_0p25/${ROTDIR}\//}/ocn_ice*0p25x0p25.grb2" >> "${DATA}/ocn_ice_grib2_0p25.txt"
+ echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}6hr_avg.f*.nc" >> "${DATA}/ocean_6hravg.txt"
+ echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}daily.f*.nc" >> "${DATA}/ocean_daily.txt"
+
+ {
+ if [[ -d "${COM_OCEAN_GRIB}/5p00" ]]; then
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2"
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2.idx"
+ fi
+ if [[ -d "${COM_OCEAN_GRIB}/1p00" ]]; then
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2"
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2.idx"
+ fi
+ if [[ -d "${COM_OCEAN_GRIB}/0p25" ]]; then
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2"
+ echo "${COM_OCEAN_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2.idx"
+ fi
+ } >> "${DATA}/ocean_grib2.txt"
# Also save fluxes from atmosphere
+ head="gfs.t${cyc}z."
+ rm -f "${DATA}/gfs_flux_1p00.txt"; touch "${DATA}/gfs_flux_1p00.txt"
{
echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???"
echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???.idx"
} >> "${DATA}/gfs_flux_1p00.txt"
fi
- if [[ ${DO_ICE} = "YES" ]]; then
- head="gfs.t${cyc}z."
+ if [[ "${DO_ICE}" == "YES" ]]; then
+ head="gfs.ice.t${cyc}z."
+ rm -f "${DATA}/ice_6hravg.txt"; touch "${DATA}/ice_6hravg.txt"
+ rm -f "${DATA}/ice_grib2.txt"; touch "${DATA}/ice_grib2.txt"
- rm -f "${DATA}/ice.txt"
- touch "${DATA}/ice.txt"
{
- echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in"
- echo "${COM_ICE_HISTORY/${ROTDIR}\//}/ice*nc"
- } >> "${DATA}/ice.txt"
+ echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}ic.nc"
+ echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}6hr_avg.f*.nc"
+ } >> "${DATA}/ice_6hravg.txt"
+
+ {
+ if [[ -d "${COM_ICE_GRIB}/5p00" ]]; then
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2"
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/5p00/${head}5p00.f*.grib2.idx"
+ fi
+ if [[ -d "${COM_ICE_GRIB}/1p00" ]]; then
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2"
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/1p00/${head}1p00.f*.grib2.idx"
+ fi
+ if [[ -d "${COM_ICE_GRIB}/0p25" ]]; then
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2"
+ echo "${COM_ICE_GRIB/${ROTDIR}\//}/0p25/${head}0p25.f*.grib2.idx"
+ fi
+ } >> "${DATA}/ice_grib2.txt"
fi
if [[ ${DO_AERO} = "YES" ]]; then
@@ -766,4 +782,3 @@ fi ##end of enkfgdas or enkfgfs
#-----------------------------------------------------
exit 0
-
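
A note on the ${COM_*/${ROTDIR}\//} idiom in the archive lists above: it is bash pattern substitution that strips the leading "${ROTDIR}/" so the lists contain ROTDIR-relative paths. A minimal illustration with assumed paths:

    ROTDIR="/lfs/h2/comrot"
    COM_OCEAN_GRIB="/lfs/h2/comrot/gfs.20131001/00/model_data/ocean/grib2"
    echo "${COM_OCEAN_GRIB/${ROTDIR}\//}"   # -> gfs.20131001/00/model_data/ocean/grib2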
diff --git a/ush/icepost.ncl b/ush/icepost.ncl
deleted file mode 100755
index ad102971c4..0000000000
--- a/ush/icepost.ncl
+++ /dev/null
@@ -1,382 +0,0 @@
-;------------------------------------------------------------------
-; Denise.Worthen@noaa.gov (Feb 2019)
-;
-; This script will remap CICE5 output on the tripole grid to
-; a set of rectilinear grids using pre-computed ESMF weights to remap
-; the listed fields to the destination grid and write the results
-; to a new netCDF file
-;
-; See ocnpost.ncl for a complete description
-;
-; Bin.Li@noaa.gov (May 2019)
-; This script is revised to be used in the coupled workflow.
-; Revised parts are marked by
-
- load "$NCARG_ROOT/lib/ncarg/nclscripts/esmf/ESMF_regridding.ncl"
-
-;----------------------------------------------------------------------
-begin
-
-;************************************************
-; specify parameters
-;************************************************
-;
-
- output_masks = False
- ; destination grid sizes and name
- dsttype = (/"rect."/)
- ;dstgrds = (/"1p0", "0p5", "0p25"/)
-;
-
- ; specify a location to use
- ; nemsrc = "/scratch4/NCEPDEV/ocean/save/Denise.Worthen/NEMS_INPUT0.1/ocnicepost/"
- ; interpolation methods
- methods = (/"bilinear" ,"conserve"/)
- ; ocean model output location
- ;dirsrc = "/scratch3/NCEPDEV/stmp2/Denise.Worthen/BM1_ice/"
-
-
- ; variables to be regridded with the native tripole stagger location
-
- varlist = (/ (/ "hi_h", "Ct", "bilinear"/) \
- ,(/ "hs_h", "Ct", "bilinear"/) \
- ,(/ "Tsfc_h", "Ct", "bilinear"/) \
- ,(/ "aice_h", "Ct", "bilinear"/) \
- ,(/ "sst_h", "Ct", "bilinear"/) \
- /)
- dims = dimsizes(varlist)
- nvars = dims(0)
- delete(dims)
- ;print(varlist)
-
- ; vectors to be regridded with the native tripole stagger location
- ; and dimensionality
- ; note: vectors are always unstaggered using bilinear weights, but can
- ; be remapped using conservative
- nvpairs = 1
- veclist = new( (/nvpairs,3,2/),"string")
- veclist = (/ (/ (/"uvel_h", "vvel_h"/), (/"Bu", "Bu"/), (/"bilinear", "bilinear"/) /) \
- /)
- ;print(veclist)
-
- begTime = get_cpu_time()
-;----------------------------------------------------------------------
-; make a list of the directories and files from the run
-;----------------------------------------------------------------------
-; idate = "20120101"
-; icefilelist = systemfunc("ls "+dirsrc+"gfs."+idate+"/00/"+"ice*.nc")
-; icef = addfiles(icefilelist,"r")
-; nfiles = dimsizes(icefilelist)
-;
-
- ; get the rotation angle
- angleT = icef[0]->ANGLET
-
- ; get a 2 dimensional fields for creating the interpolation mask
- ; the mask2d contain 1's on land and 0's at valid points.
- mask2d = where(ismissing(icef[0]->sst_h), 1.0, 0.0)
- ;printVarSummary(mask2d)
-
- ; create conformed rotation arrays to make vector rotations cleaner
- angleT2d=conform_dims(dimsizes(mask2d),angleT,(/1,2/))
-
-;----------------------------------------------------------------------
-; loop over the output resolutions
-;----------------------------------------------------------------------
-
- jj = 1
- ii = 0
-
- do jj = 0,dimsizes(dstgrds)-1
- ;outres = "_"+dstgrds(jj)+"x"+dstgrds(jj)
- outres = dstgrds(jj)+"x"+dstgrds(jj)
- outgrid = dstgrds(jj)
-
- ; regrid a field to obtain the output xy dimensions
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
- tt = ESMF_regrid_with_weights(angleT,wgtsfile,False)
- tt!0 = "lat"
- tt!1 = "lon"
- lat = tt&lat
- lon = tt&lon
- dims = dimsizes(tt)
- nlat = dims(0)
- nlon = dims(1)
- print("fields will be remapped to destination grid size "\
- +nlon+" "+nlat)
-
- delete(tt)
- delete(dims)
-
- ; regrid the masks to obtain the interpolation masks.
- ; the mask2d contain 1's on land and 0's at valid points.
- ; when remapped, any mask value > 0 identifies land values that
- ; have crept into the field. remapped model fields are then
- ; masked with this interpolation mask
-
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
- rgmask2d = ESMF_regrid_with_weights(mask2d, wgtsfile,False)
-
- if(output_masks)then
- testfile = "masks_"+dstgrds(jj)+".nc"
- system("/bin/rm -f "+testfile)
- ; create
- testcdf = addfile(testfile,"c")
- testcdf->rgmask2d = rgmask2d
- ; close
- delete(testcdf)
- end if
-
- ; create the interpolation mask
- rgmask2d = where(rgmask2d .gt. 0.0, rgmask2d@_FillValue, 1.0)
-
-;----------------------------------------------------------------------
-; loop over each file in the icefilelist
-;----------------------------------------------------------------------
-;
- ; retrieve the time stamp
- time = icef[0]->time
- delete(time@bounds)
-
-;----------------------------------------------------------------------
-; set up the output netcdf file
-;----------------------------------------------------------------------
-; system("/bin/rm -f " + outfile) ; remove if exists
-; outcdf = addfile (outfile, "c") ; open output file
-;
-;
-
- ; explicitly declare file definition mode. Improve efficiency.
- setfileoption(outcdf,"DefineMode",True)
-
- ; create global attributes of the file
- fAtt = True ; assign file attributes
- fAtt@creation_date = systemfunc ("date")
- fAtt@source_file = infile
- fileattdef( outcdf, fAtt ) ; copy file attributes
-
- ; predefine the coordinate variables and their dimensionality
- dimNames = (/"time", "lat", "lon"/)
- dimSizes = (/ -1 , nlat, nlon/)
- dimUnlim = (/ True , False, False/)
- filedimdef(outcdf,dimNames,dimSizes,dimUnlim)
-
- ; predefine the the dimensionality of the variables to be written out
- filevardef(outcdf, "time", typeof(time), getvardims(time))
- filevardef(outcdf, "lat", typeof(lat), getvardims(lat))
- filevardef(outcdf, "lon", typeof(lon), getvardims(lon))
-
- ; Copy attributes associated with each variable to the file
- filevarattdef(outcdf, "time", time)
- filevarattdef(outcdf, "lat", lat)
- filevarattdef(outcdf, "lon", lon)
-
- ; predefine variables
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- odims = (/"time", "lat", "lon"/)
- ;print("creating variable "+varname+" in file")
- filevardef(outcdf, varname, "float", odims)
- delete(odims)
- end do
-
- do nv = 0,nvpairs-1
- do nn = 0,1
- vecname = veclist(nv,0,nn)
- odims = (/"time", "lat", "lon"/)
- ;print("creating variable "+vecname+" in file")
- filevardef(outcdf, vecname, "float", odims)
- delete(odims)
- end do
- end do
-
- ; explicitly exit file definition mode.
- setfileoption(outcdf,"DefineMode",False)
-
- lat=lat(::-1)
- ; write the dimensions to the file
- outcdf->time = (/time/)
- outcdf->lat = (/lat/)
- outcdf->lon = (/lon/)
-
-;----------------------------------------------------------------------
-; loop over nvars variables
-;----------------------------------------------------------------------
-
- ;nv = 1
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- vargrid = varlist(nv,1)
- varmeth = varlist(nv,2)
-
- ;print(nv+" "+varname+" "+vargrid+" "+varmeth)
- icevar = icef[ii]->$varname$
- ndims = dimsizes(dimsizes(icevar))
- ;print(ndims+" "+dimsizes(icevar))
-
- if(vargrid .ne. "Ct")then
- ; print error if the variable is not on the Ct grid
- print("Variable is not on Ct grid")
- exit
- end if
-
- ; regrid to dsttype+dstgrd with method
- ;print("remapping "+varname+" to grid "+dsttype+dstgrds(jj))
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+varmeth+".nc"
-
- rgtt = ESMF_regrid_with_weights(icevar,wgtsfile,False)
- rgtt = where(ismissing(rgmask2d),icevar@_FillValue,rgtt)
- rgtt=rgtt(:,::-1,:)
-
- ; enter file definition mode to add variable attributes
- setfileoption(outcdf,"DefineMode",True)
- filevarattdef(outcdf, varname, rgtt)
- setfileoption(outcdf,"DefineMode",False)
-
-
- outcdf->$varname$ = (/rgtt/)
-
- delete(icevar)
- delete(rgtt)
-
- ; nv, loop over number of variables
- end do
-
-;----------------------------------------------------------------------
-;
-;----------------------------------------------------------------------
-
- ;nv = 0
- do nv = 0,nvpairs-1
- vecnames = veclist(nv,0,:)
- vecgrids = veclist(nv,1,:)
- vecmeth = veclist(nv,2,:)
- ;print(nv+" "+vecnames+" "+vecgrids+" "+vecmeth)
-
- ; create a vector pair list
- vecpairs = NewList("fifo")
- n = 0
- uvel = icef[ii]->$vecnames(n)$
- vecfld = where(ismissing(uvel),0.0,uvel)
- copy_VarAtts(uvel,vecfld)
- ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
- wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
- ut = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- delete(ut@remap)
-
- n = 1
- vvel = icef[ii]->$vecnames(n)$
- vecfld = where(ismissing(vvel),0.0,vvel)
- copy_VarAtts(vvel,vecfld)
- ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
- wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
- vt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- delete(vt@remap)
-
- ListAppend(vecpairs,ut)
- ListAppend(vecpairs,vt)
- ;print(vecpairs)
-
- ; rotate
- ; first copy Metadata
- urot = vecpairs[0]
- vrot = vecpairs[1]
- urot = cos(angleT2d)*ut - sin(angleT2d)*vt
- vrot = sin(angleT2d)*ut + cos(angleT2d)*vt
-
- ; change attribute to indicate these are now rotated velocities
- urot@long_name=str_sub_str(urot@long_name,"(x)","zonal")
- vrot@long_name=str_sub_str(vrot@long_name,"(y)","meridional")
- ; copy back
- vecpairs[0] = urot
- vecpairs[1] = vrot
- delete([/urot, vrot/])
-
- ; remap
- do n = 0,1
- vecfld = vecpairs[n]
- ; regrid to dsttype+dstgrd with method
- ;print("remapping "+vecnames(n)+" to grid "+dsttype+dstgrds(jj))
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+vecmeth(n)+".nc"
-
- rgtt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- rgtt = where(ismissing(rgmask2d),vecfld@_FillValue,rgtt)
- rgtt=rgtt(:,::-1,:)
-
- ; enter file definition mode to add variable attributes
- setfileoption(outcdf,"DefineMode",True)
- filevarattdef(outcdf, vecnames(n), rgtt)
- setfileoption(outcdf,"DefineMode",False)
-
- outcdf->$vecnames(n)$ = (/rgtt/)
- delete(rgtt)
- end do
- delete([/uvel,vvel,ut,vt,vecfld,vecpairs/])
- delete([/vecnames,vecgrids,vecmeth/])
- ; nv, loop over number of vector pairs
- end do
-
-;----------------------------------------------------------------------
-; close the outcdf and continue through filelist
-;----------------------------------------------------------------------
-
- delete(outcdf)
-
- ; ii, loop over files
- ;end do
- ;jj, loop over destination grids
- delete([/lat,lon,nlon,nlat/])
- delete([/rgmask2d/])
- end do
- print("One complete ice file in " + (get_cpu_time() - begTime) + " seconds")
-exit
-end
diff --git a/ush/oceanice_nc2grib2.sh b/ush/oceanice_nc2grib2.sh
new file mode 100755
index 0000000000..1d0e5ae274
--- /dev/null
+++ b/ush/oceanice_nc2grib2.sh
@@ -0,0 +1,319 @@
+#!/bin/bash
+
+# This script contains functions to convert ocean/ice rectilinear netCDF files to grib2 format
+# using the wgrib2 utility, and then indexes the resulting grib2 files
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+################################################################################
+function _ice_nc2grib2 {
+# This function converts the ice rectilinear netCDF files to grib2 format
+
+ # Set the inputs
+ local grid=${1} # 0p25, 0p50, 1p00, 5p00
+ local latlon_dims=${2} # 0:721:0:1440, 0:361:0:720, 0:181:0:360, 0:36:0:72
+ local current_cycle=${3} # YYYYMMDDHH
+ local aperiod=${4} # 0-6
+ local infile=${5} # ice.0p25.nc
+ local outfile=${6} # ice.0p25.grib2
+ local template=${7} # template.global.0p25.gb2
+
+ ${WGRIB2} "${template}" \
+ -import_netcdf "${infile}" "hi_h" "0:1:${latlon_dims}" \
+ -set_var ICETK -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "aice_h" "0:1:${latlon_dims}" \
+ -set_var ICEC -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "Tsfc_h" "0:1:${latlon_dims}" \
+ -set_var ICETMP -set center 7 -rpn "273.15:+" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "uvel_h" "0:1:${latlon_dims}" \
+ -set_var UICE -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "vvel_h" "0:1:${latlon_dims}" \
+ -set_var VICE -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
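+
+  # Note: -rpn "273.15:+" above converts Tsfc_h from degC to K before it is
+  # encoded as ICETMP (wgrib2 reverse-polish notation: push 273.15, then add)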
+
+# Additional variables needed for GFSv17/GEFSv13 operational forecast
+# files, but GRIB2 parameters not available in NCEP (-set center 7)
+# tables in wgrib2 v2.0.8:
+
+# -import_netcdf "${infile}" "hs_h" "0:1:${latlon_dims}" \
+# -set_var ??? -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "frzmlt_h" "0:1:${latlon_dims}" \
+# -set_var ??? -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "albsni_h" "0:1:${latlon_dims}" \
+# -set_var ALBICE -set center 7 -rpn "100.0:/" \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "mlt_onset_h" "0:1:${latlon_dims}" \
+# -set_var ??? -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "frz_onset_h" "0:1:${latlon_dims}" \
+# -set_var ??? -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
+
+ rc=$?
+ # Check if the conversion was successful
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ice rectilinear netCDF file to grib2 format"
+ fi
+ return "${rc}"
+
+}
+
+################################################################################
+function _ocean2D_nc2grib2 {
+# This function converts the ocean 2D rectilinear netCDF files to grib2 format
+
+ # Set the inputs
+ local grid=${1} # 0p25, 0p50, 1p00, 5p00
+ local latlon_dims=${2} # 0:721:0:1440, 0:361:0:720, 0:181:0:360, 0:36:0:72
+ local current_cycle=${3} # YYYYMMDDHH
+ local aperiod=${4} # 0-6
+ local infile=${5} # ocean.0p25.nc
+ local outfile=${6} # ocean_2D.0p25.grib2
+ local template=${7} # template.global.0p25.gb2
+
+ ${WGRIB2} "${template}" \
+ -import_netcdf "${infile}" "SSH" "0:1:${latlon_dims}" \
+ -set_var SSHG -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "SST" "0:1:${latlon_dims}" \
+ -set_var WTMP -set center 7 -rpn "273.15:+" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "SSS" "0:1:${latlon_dims}" \
+ -set_var SALIN -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "speed" "0:1:${latlon_dims}" \
+ -set_var SPC -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "SSU" "0:1:${latlon_dims}" \
+ -set_var UOGRD -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "SSV" "0:1:${latlon_dims}" \
+ -set_var VOGRD -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "latent" "0:1:${latlon_dims}" \
+ -set_var LHTFL -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "sensible" "0:1:${latlon_dims}" \
+ -set_var SHTFL -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "SW" "0:1:${latlon_dims}" \
+ -set_var DSWRF -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "LW" "0:1:${latlon_dims}" \
+ -set_var DLWRF -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "LwLatSens" "0:1:${latlon_dims}" \
+ -set_var THFLX -set center 7 \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+ -import_netcdf "${infile}" "MLD_003" "0:1:${latlon_dims}" \
+ -set_var WDEPTH -set center 7 -set_lev "mixed layer depth" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
+
+# Additional variables needed for GFSv17/GEFSv13 operational forecast
+# files, but GRIB2 parameters not available in NCEP (-set center 7)
+# tables in wgrib2 v2.0.8:
+#
+# -import_netcdf "${infile}" "Heat_PmE" "0:1:${latlon_dims}" \
+# -set_var DWHFLUX -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "taux" "0:1:${latlon_dims}" \
+# -set_var XCOMPSS -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}" \
+# -import_netcdf "${infile}" "tauy" "0:1:${latlon_dims}" \
+# -set_var YCOMPSS -set center 7 \
+# -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+# -set_scaling same same -set_grib_type c1 -grib_out "${outfile}"
+
+ rc=$?
+ # Check if the conversion was successful
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ocean rectilinear netCDF file to grib2 format"
+ fi
+ return "${rc}"
+
+}
+
+################################################################################
+function _ocean3D_nc2grib2 {
+# This function converts the ocean 3D rectilinear netCDF files to grib2 format
+
+ # Set the inputs
+ local grid=${1} # 0p25, 0p50, 1p00, 5p00
+ local latlon_dims=${2} # 0:721:0:1440, 0:361:0:720, 0:181:0:360, 0:36:0:72
+ local levels=${3} # 5:15:25:35:45:55:65:75:85:95:105:115:125
+ local current_cycle=${4} # YYYYMMDDHH
+ local aperiod=${5} # 0-6
+ local infile=${6} # ocean.0p25.nc
+ local outfile=${7} # ocean_3D.0p25.grib2
+ local template=${8} # template.global.0p25.gb2
+
+ IFS=':' read -ra depths <<< "${levels}"
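+  # e.g. levels="5:15:25" splits into depths=(5 15 25); zl below indexes the
+  # matching vertical slice of each netCDF variable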
+
+ zl=0
+ for depth in "${depths[@]}"; do
+
+ [[ -f "tmp.gb2" ]] && rm -f "tmp.gb2"
+
+ ${WGRIB2} "${template}" \
+ -import_netcdf "${infile}" "temp" "0:1:${zl}:1:${latlon_dims}" \
+ -set_var WTMP -set center 7 -rpn "273.15:+" \
+ -set_lev "${depth} m below water surface" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \
+ -import_netcdf "${infile}" "so" "0:1:${zl}:1:${latlon_dims}" \
+ -set_var SALIN -set center 7 \
+ -set_lev "${depth} m below water surface" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \
+ -import_netcdf "${infile}" "uo" "0:1:${zl}:1:${latlon_dims}" \
+ -set_var UOGRD -set center 7 \
+ -set_lev "${depth} m below water surface" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2 \
+ -import_netcdf "${infile}" "vo" "0:1:${zl}:1:${latlon_dims}" \
+ -set_var VOGRD -set center 7 \
+ -set_lev "${depth} m below water surface" \
+ -set_date "${current_cycle}" -set_ftime "${aperiod} hour ave fcst" \
+ -set_scaling same same -set_grib_type c1 -grib_out tmp.gb2
+
+ rc=$?
+ # Check if the conversion was successful
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ocean rectilinear netCDF file to grib2 format at depth ${depth}m, ABORT!"
+ return "${rc}"
+ fi
+
+ cat tmp.gb2 >> "${outfile}"
+ rm -f tmp.gb2
+ ((zl = zl + 1))
+
+ done
+
+ # Notes:
+ # WATPTEMP (water potential temperature (theta)) may be a better
+ # GRIB2 parameter than WTMP (water temperature) if MOM6 outputs
+ # potential temperature. WATPTEMP is not available in NCEP
+ # (-set center 7) tables in wgrib2 v2.0.8.
+
+ return "${rc}"
+
+}
+
+################################################################################
+# Input arguments
+component=${1:?"Need a valid component; options: ice|ocean"}
+grid=${2:-"0p25"} # Default to 0.25-degree grid
+current_cycle=${3:-"2013100100"} # Default to 2013100100
+avg_period=${4:-"0-6"} # Default to 6-hourly average
+ocean_levels=${5:-"5:15:25:35:45:55:65:75:85:95:105:115:125"} # Default to 13 levels
+
+case "${grid}" in
+ "0p25")
+ latlon_dims="0:721:0:1440"
+ ;;
+ "0p50")
+ latlon_dims="0:361:0:720"
+ ;;
+ "1p00")
+ latlon_dims="0:181:0:360"
+ ;;
+ "5p00")
+ latlon_dims="0:36:0:72"
+ ;;
+ *)
+ echo "FATAL ERROR: Unsupported grid '${grid}', ABORT!"
+ exit 1
+ ;;
+esac
+
+input_file="${component}.${grid}.nc"
+template="template.global.${grid}.gb2"
+
+# Check if the template file exists
+if [[ ! -f "${template}" ]]; then
+ echo "FATAL ERROR: '${template}' does not exist, ABORT!"
+ exit 127
+fi
+
+# Check if the input file exists
+if [[ ! -f "${input_file}" ]]; then
+ echo "FATAL ERROR: '${input_file}' does not exist, ABORT!"
+ exit 127
+fi
+
+case "${component}" in
+ "ice")
+ rm -f "${component}.${grid}.grib2" || true
+ _ice_nc2grib2 "${grid}" "${latlon_dims}" "${current_cycle}" "${avg_period}" "${input_file}" "${component}.${grid}.grib2" "${template}"
+ rc=$?
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ice rectilinear netCDF file to grib2 format"
+ exit "${rc}"
+ fi
+ ;;
+ "ocean")
+ rm -f "${component}_2D.${grid}.grib2" || true
+ _ocean2D_nc2grib2 "${grid}" "${latlon_dims}" "${current_cycle}" "${avg_period}" "${input_file}" "${component}_2D.${grid}.grib2" "${template}"
+ rc=$?
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ocean 2D rectilinear netCDF file to grib2 format"
+ exit "${rc}"
+ fi
+ rm -f "${component}_3D.${grid}.grib2" || true
+ _ocean3D_nc2grib2 "${grid}" "${latlon_dims}" "${ocean_levels}" "${current_cycle}" "${avg_period}" "${input_file}" "${component}_3D.${grid}.grib2" "${template}"
+ rc=$?
+ if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to convert the ocean 3D rectilinear netCDF file to grib2 format"
+ exit "${rc}"
+ fi
+ # Combine the 2D and 3D grib2 files into a single file
+ rm -f "${component}.${grid}.grib2" || true
+ cat "${component}_2D.${grid}.grib2" "${component}_3D.${grid}.grib2" > "${component}.${grid}.grib2"
+
+ ;;
+ *)
+ echo "FATAL ERROR: Unknown component: '${component}'. ABORT!"
+ exit 3
+ ;;
+esac
+
+# Index the output grib2 file
+${WGRIB2} -s "${component}.${grid}.grib2" > "${component}.${grid}.grib2.idx"
+rc=$?
+# Check if the indexing was successful
+if (( rc != 0 )); then
+ echo "FATAL ERROR: Failed to index the file '${component}.${grid}.grib2'"
+ exit "${rc}"
+fi
+
+exit 0
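
For reference, a hedged sketch of how this converter might be invoked from a job step; the working directory and WGRIB2 location are assumptions, and the script expects ${component}.${grid}.nc and template.global.${grid}.gb2 to already be staged in the current directory:

    export WGRIB2="/path/to/wgrib2"                      # hypothetical path
    # ice: component grid current_cycle avg_period
    "${HOMEgfs}/ush/oceanice_nc2grib2.sh" ice 0p25 2013100100 0-6
    # ocean: same arguments plus the colon-separated depth list
    "${HOMEgfs}/ush/oceanice_nc2grib2.sh" ocean 0p25 2013100100 0-6 "5:15:25"
    # outputs: ice.0p25.grib2(.idx) and ocean.0p25.grib2(.idx)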
diff --git a/ush/ocnpost.ncl b/ush/ocnpost.ncl
deleted file mode 100755
index 27e60b0edf..0000000000
--- a/ush/ocnpost.ncl
+++ /dev/null
@@ -1,588 +0,0 @@
-;------------------------------------------------------------------
-; Denise.Worthen@noaa.gov (Feb 2019)
-;
-; This script will remap MOM6 ocean output on the tripole grid to
-; a set of rectilinear grids using pre-computed ESMF weights to remap
-; the listed fields to the destination grid and write the results
-; to a new netCDF file
-;
-; Prior to running this script, files containing the conservative
-; and bilinear regridding weights must be generated. These weights
-; are created using the generate_iceocnpost_weights.ncl script.
-;
-; Note: the descriptive text below assumes fortran type indexing
-; where the variables are indexed as (i,j) and indices start at 1
-; NCL indices are (j,i) and start at 0
-;
-; The post involves these steps
-;
-; a) unstaggering velocity points
-; MOM6 is on an Arakawa C grid. MOM6 refers to these
-; locations as "Ct" for the centers and "Cu", "Cv"
-; "Bu" for the left-right, north-south and corner
-; points, respectively.
-;
-; The indexing scheme in MOM6 is as follows:
-;
-; Cv@i,j
-; ----X------X Bu@i,j
-; |
-; |
-; Ct@i,j |
-; X X Cu@i,j
-; |
-; |
-; |
-;
-; CICE5 is on an Arakawa B grid. CICE5 refers to these
-; locations as TLAT,TLON for the centers and ULAT,ULON
-; for the corners
-;
-; In UFS, the CICE5 grid has been created using the MOM6
-; supergrid file. Therefore, all grid points are consistent
-; between the two models.
-;
-; In the following, MOM6's nomenclature will be followed,
-; so that CICE5's U-grid will be referred to as "Bu".
-;
-; b) rotation of tripole vectors to East-West
-; MOM6 and CICE6 both output velocties on their native
-; velocity points. For MOM6, that is u-velocities on the
-; Cu grid and v-velocites on the Cv grid. For CICE5, it is
-; both u and v-velocities on the Bu grid.
-;
-; The rotation angle for both models are defined at center
-; grid points; therefore the velocities need to be first
-; unstaggered before rotation. MOM6 and CICE5 also define
-; opposite directions for the rotations. Finally, while the
-; grid points are identical between the two models, CICE5
-; calculates the rotation angle at center grid points by
-; averaging the four surrounding B grid points. MOM6 derives
-; the rotation angle at the center directly from the latitude
-; and longitude of the center grid points. The angles are therefor
-; not identical between the two grids.
-;
-; c) conservative regridding of some fields
-; Fields such as ice concentration or fluxes which inherently
-; area area-weighted require conservative regridding. Most other
-; variables are state variables and can be regridded using
-; bilinear weighting.
-;
-; An efficient way to accomplish the unstaggering of velocities
-; is to use the bilinear interpolation weights between grid
-; points of the Arakawa C grid and the center grid points (for example
-; Cu->Ct). These weights are generated by the weight generation script
-;
-; Remapping from the tripole to rectilinear uses either the bilinear
-; or conservative weights from the weight generation script. Bilinear weights
-; generated for the first vertical level can be used on other levels
-; (where the masking changes) by utilizing the correct masking procedure.
-; Set output_masks to true to examine the interpolation masks.
-;
-; Intermediate file output can easily be generated for debugging by
-; follwing the example in the output_masks logical
-;
-; Bin.Li@noaa.gov (May 2019)
-; The scripts is revised for use in the coupled workflow.
-;
- load "$NCARG_ROOT/lib/ncarg/nclscripts/esmf/ESMF_regridding.ncl"
-
-;----------------------------------------------------------------------
-begin
-;
-
- ; warnings (generated by int2p_n_Wrap) can be supressed by
- ; the following (comment out to get the warnings)
- err = NhlGetErrorObjectId()
- setvalues err
-; "errLevel" : "Fatal" ; only report Fatal errors
- "errLevel" : "Verbose"
- end setvalues
-
- output_masks = False
-
- ; specify a location to use
- ; nemsrc = "/scratch4/NCEPDEV/ocean/save/Denise.Worthen/NEMS_INPUT0.1/ocnicepost/"
- ; interpolation methods
- methods = (/"bilinear" ,"conserve"/)
- ; ocean model output location
- ;dirsrc = "/scratch3/NCEPDEV/stmp2/Denise.Worthen/BM1_ocn/"
-
- ; destination grid sizes and name
- dsttype = (/"rect."/)
- ;dstgrds = (/"1p0", "0p5", "0p25"/)
- ;dstgrds = (/"0p5"/)
- dstgrds = (/"0p25"/)
-
- ; variables to be regridded with the native tripole stagger location
- ; and dimensionality
- ; first BM contained only field "mld", which was actually ePBL
- ; the remaining BMs contain ePBL, MLD_003 and MLD_0125
- ; the following NCO command will be issued at the end
- ; to rename the variable mld to ePBL if the variable mld is found
- ; ncocmd = "ncrename -O -v mld,ePBL "
- ncocmd = "ncrename -O -v MLD_003,mld"
-
- varlist = (/ (/ "SSH", "Ct", "bilinear", "2"/) \
- ,(/ "SST", "Ct", "bilinear", "2"/) \
- ,(/ "SSS", "Ct", "bilinear", "2"/) \
- ,(/ "speed", "Ct", "bilinear", "2"/) \
- ,(/ "temp", "Ct", "bilinear", "3"/) \
- ,(/ "so", "Ct", "bilinear", "3"/) \
- ,(/ "latent", "Ct", "conserve", "2"/) \
- ,(/ "sensible", "Ct", "conserve", "2"/) \
- ,(/ "SW", "Ct", "conserve", "2"/) \
- ,(/ "LW", "Ct", "conserve", "2"/) \
- ,(/ "evap", "Ct", "conserve", "2"/) \
- ,(/ "lprec", "Ct", "conserve", "2"/) \
- ,(/ "fprec", "Ct", "conserve", "2"/) \
- ,(/"LwLatSens", "Ct", "conserve", "2"/) \
- ,(/ "Heat_PmE", "Ct", "conserve", "2"/) \
-; ,(/ "mld", "Ct", "bilinear", "2"/) \
- ,(/ "ePBL", "Ct", "bilinear", "2"/) \
- ,(/ "MLD_003", "Ct", "bilinear", "2"/) \
- ,(/ "MLD_0125", "Ct", "bilinear", "2"/) \
- /)
- dims = dimsizes(varlist)
- nvars = dims(0)
- delete(dims)
- ;print(varlist)
-
- ; vectors to be regridded with the native tripole stagger location
- ; and dimensionality
- ; note: vectors are always unstaggered using bilinear weights, but can
-  ; be remapped using conservative weights
- nvpairs = 3
- veclist = new( (/nvpairs,4,2/),"string")
- veclist = (/ (/ (/ "SSU", "SSV"/), (/"Cu", "Cv"/), (/"bilinear", "bilinear"/), (/"2", "2"/) /) \
- , (/ (/ "uo", "vo"/), (/"Cu", "Cv"/), (/"bilinear", "bilinear"/), (/"3", "3"/) /) \
- , (/ (/ "taux", "tauy"/), (/"Cu", "Cv"/), (/"conserve", "conserve"/), (/"2", "2"/) /) \
- /)
- ;print(veclist)
-
- begTime = get_cpu_time()
-;----------------------------------------------------------------------
-; make a list of the directories and files from the run
-;----------------------------------------------------------------------
-
-; idate = "20120101"
-
-; ocnfilelist = systemfunc("ls "+dirsrc+"gfs."+idate+"/00/"+"ocn*.nc")
-; ocnf = addfiles(ocnfilelist,"r")
-; nfiles = dimsizes(ocnfilelist)
-;
-
- ; get the rotation angles and vertical grid from the first file
-  ; two different names were used for the angles, either sinrot,cosrot
- ; or sin_rot,cos_rot
- if(isfilevar(ocnf[0],"sin_rot"))then
- sinrot = ocnf[0]->sin_rot
- else
- sinrot = ocnf[0]->sinrot
- end if
- if(isfilevar(ocnf[0],"cos_rot"))then
- cosrot = ocnf[0]->cos_rot
- else
- cosrot = ocnf[0]->cosrot
- end if
- z_l = ocnf[0]->z_l
- z_i = ocnf[0]->z_i
- nlevs = dimsizes(z_l)
-
-  ; get 2- and 3-dimensional fields for creating the interpolation masks
- ; the mask2d,mask3d contain 1's on land and 0's at valid points.
- mask2d = where(ismissing(ocnf[0]->SST), 1.0, 0.0)
- mask3d = where(ismissing(ocnf[0]->temp), 1.0, 0.0)
- ;printVarSummary(mask2d)
- ;printVarSummary(mask3d)
-
- ; create conformed rotation arrays to make vector rotations cleaner
- sinrot2d=conform_dims(dimsizes(mask2d),sinrot,(/1,2/))
- cosrot2d=conform_dims(dimsizes(mask2d),cosrot,(/1,2/))
-
- sinrot3d=conform_dims(dimsizes(mask3d),sinrot,(/2,3/))
- cosrot3d=conform_dims(dimsizes(mask3d),cosrot,(/2,3/))
-
- ; check for variables in file. this is only required because
- ; of the missing/misnamed MLD variables in the first BM
- ; only the varlist is checked, since it is assumed there are
- ; no other variables missing after the first benchmark
- valid = new((/nvars/),"logical")
- valid = False
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- if(isfilevar(ocnf[0],varname))then
- valid(nv) = True
- end if
- print(varlist(nv,0)+" "+valid(nv))
- end do
-
-;----------------------------------------------------------------------
-; loop over the output resolutions
-;----------------------------------------------------------------------
-
- jj = 1
- ii = 0
-
- do jj = 0,dimsizes(dstgrds)-1
- ;outres = "_"+dstgrds(jj)+"x"+dstgrds(jj)
- outres = dstgrds(jj)+"x"+dstgrds(jj)
- outgrid = dstgrds(jj)
-
- ; regrid a field to obtain the output xy dimensions
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
- tt = ESMF_regrid_with_weights(sinrot,wgtsfile,False)
- tt!0 = "lat"
- tt!1 = "lon"
- lat = tt&lat
- lon = tt&lon
- dims = dimsizes(tt)
- nlat = dims(0)
- nlon = dims(1)
-
- print("fields will be remapped to destination grid size "\
- +nlon+" "+nlat)
-
- delete(tt)
- delete(dims)
-
- ; regrid the masks to obtain the interpolation masks.
- ; the mask2d,mask3d contain 1's on land and 0's at valid points.
- ; when remapped, any mask value > 0 identifies land values that
- ; have crept into the field. remapped model fields are then
- ; masked with this interpolation mask
-
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+".bilinear.nc"
- rgmask2d = ESMF_regrid_with_weights(mask2d, wgtsfile,False)
- rgmask3d = ESMF_regrid_with_weights(mask3d, wgtsfile,False)
-
- if(output_masks)then
- testfile = "masks_"+dstgrds(jj)+".nc"
- system("/bin/rm -f "+testfile)
- ; create
- testcdf = addfile(testfile,"c")
- testcdf->rgmask2d = rgmask2d
- testcdf->rgmask3d = rgmask3d
- ; close
- delete(testcdf)
- end if
-
- ; create the interpolation mask
- rgmask2d = where(rgmask2d .gt. 0.0, rgmask2d@_FillValue, 1.0)
- rgmask3d = where(rgmask3d .gt. 0.0, rgmask3d@_FillValue, 1.0)
-
- ; conformed depth array
- depth = conform_dims(dimsizes(mask3d), z_l, (/1/))
- ;print(dimsizes(depth))
-
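The interpolation-mask construction above condenses to a few lines of NumPy. In this sketch, remap is a hypothetical stand-in for ESMF_regrid_with_weights called with the bilinear weights, and NaN stands in for the _FillValue used by the script:

    import numpy as np

    def make_interp_mask(land_mask, remap):
        # land_mask holds 1.0 on land and 0.0 at valid points; any
        # remapped value > 0 has been contaminated by land
        rg = remap(land_mask)
        return np.where(rg > 0.0, np.nan, 1.0)

    def apply_interp_mask(field, interp_mask, fill_value):
        # discard destination points flagged by the interpolation mask
        return np.where(np.isnan(interp_mask), fill_value, field)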
-;----------------------------------------------------------------------
-; loop over each file in the ocnfilelist
-;----------------------------------------------------------------------
-;
-
- ; retrieve the time stamp
- time = ocnf[0]->time
- delete(time@bounds)
-
-;----------------------------------------------------------------------
-; set up the output netcdf file
-;----------------------------------------------------------------------
-; system("/bin/rm -f " + outfile) ; remove if exists
-; outcdf = addfile (outfile, "c") ; open output file
-; specify output file information and open file for output
- FILENAME_REGRID = DATA_TMP+"/ocnr"+VDATE+"."+ENSMEM+"."+IDATE+"_"+outres+"_MOM6.nc"
- if (isfilepresent(FILENAME_REGRID)) then
- system("rm -f "+FILENAME_REGRID)
- end if
- outcdf = addfile(FILENAME_REGRID,"c")
- outfile=FILENAME_REGRID
-
-  ; explicitly enter file definition mode to improve efficiency.
- setfileoption(outcdf,"DefineMode",True)
-
- ; create global attributes of the file
- fAtt = True ; assign file attributes
- fAtt@creation_date = systemfunc ("date")
- fAtt@source_file = infile
- fileattdef( outcdf, fAtt ) ; copy file attributes
-
- ; predefine the coordinate variables and their dimensionality
- ; dimNames = (/"time", "z_l", "z_i", "z_T", "lat", "lon"/)
- dimNames = (/"time", "z_l", "z_i", "lat", "lon"/)
- ;dimSizes = (/ -1 , nlevs, nlevs+1, nTd, nlat, nlon/)
- dimSizes = (/ -1 , nlevs, nlevs+1, nlat, nlon/)
- ;dimUnlim = (/ True , False, False, False, False, False/)
- dimUnlim = (/ True , False, False, False, False/)
- filedimdef(outcdf,dimNames,dimSizes,dimUnlim)
-
-  ; predefine the dimensionality of the variables to be written out
- filevardef(outcdf, "time", typeof(time), getvardims(time))
- filevardef(outcdf, "z_l", typeof(z_l), getvardims(z_l))
- filevardef(outcdf, "z_i", typeof(z_i), getvardims(z_i))
- ;filevardef(outcdf, "z_T", typeof(z_T), getvardims(z_T))
- filevardef(outcdf, "lat", typeof(lat), getvardims(lat))
- filevardef(outcdf, "lon", typeof(lon), getvardims(lon))
-
- ; Copy attributes associated with each variable to the file
- filevarattdef(outcdf, "time", time)
- filevarattdef(outcdf, "z_l", z_l)
- filevarattdef(outcdf, "z_i", z_i)
- ;filevarattdef(outcdf, "z_T", z_T)
- filevarattdef(outcdf, "lat", lat)
- filevarattdef(outcdf, "lon", lon)
-
- ; predefine variables
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- vardims = varlist(nv,3)
- if(valid(nv))then
- if(vardims .eq. "2")then
- odims = (/"time", "lat", "lon"/)
- else
- odims = (/"time", "z_l", "lat", "lon"/)
- end if
- ;print("creating variable "+varname+" in file")
- filevardef(outcdf, varname, "float", odims)
- delete(odims)
- end if
- end do
-
- do nv = 0,nvpairs-1
- do nn = 0,1
- vecname = veclist(nv,0,nn)
- vecdims = veclist(nv,3,nn)
- if(vecdims .eq. "2")then
- odims = (/"time", "lat", "lon"/)
- else
- odims = (/"time", "z_l", "lat", "lon"/)
- end if
- ;print("creating variable "+vecname+" in file")
- filevardef(outcdf, vecname, "float", odims)
- delete(odims)
- delete(vecdims)
- end do
- end do
-
- ; explicitly exit file definition mode.
- setfileoption(outcdf,"DefineMode",False)
-
- ; write the dimensions to the file
- outcdf->time = (/time/)
- outcdf->z_l = (/z_l/)
- outcdf->z_i = (/z_i/)
-; outcdf->z_T = (/z_T/)
-;
- outcdf->lat = (/lat/)
- outcdf->lon = (/lon/)
-
-;----------------------------------------------------------------------
-; loop over nvars variables
-;----------------------------------------------------------------------
-
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- vargrid = varlist(nv,1)
- varmeth = varlist(nv,2)
- vardims = varlist(nv,3)
-
- if(valid(nv))then
- ;print(nv+" "+varname+" "+vargrid+" "+varmeth)
- ocnvar = ocnf[ii]->$varname$
- ndims = dimsizes(dimsizes(ocnvar))
- ;print(ndims+" "+dimsizes(ocnvar))
-
- if(vargrid .ne. "Ct")then
- ; print error if the variable is not on the Ct grid
- print("Variable is not on Ct grid")
- exit
- end if
-
- ; regrid to dsttype+dstgrd with method
- ;print("remapping "+varname+" to grid "+dsttype+dstgrds(jj))
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+varmeth+".nc"
-
- rgtt = ESMF_regrid_with_weights(ocnvar,wgtsfile,False)
- if(vardims .eq. "2")then
- rgtt = where(ismissing(rgmask2d),ocnvar@_FillValue,rgtt)
- rgtt=rgtt(:,::-1,:)
- else
- rgtt = where(ismissing(rgmask3d),ocnvar@_FillValue,rgtt)
- rgtt=rgtt(:,:,::-1,:)
- end if
-
- ; enter file definition mode to add variable attributes
- setfileoption(outcdf,"DefineMode",True)
- filevarattdef(outcdf, varname, rgtt)
- setfileoption(outcdf,"DefineMode",False)
-
- outcdf->$varname$ = (/rgtt/)
-
- delete(ocnvar)
- delete(rgtt)
-
- ; variable exists
- end if
- ; nv, loop over number of variables
- end do
-
-;----------------------------------------------------------------------
-;
-;----------------------------------------------------------------------
-
- ;nv = 2
- do nv = 0,nvpairs-1
- vecnames = veclist(nv,0,:)
- vecgrids = veclist(nv,1,:)
- vecmeth = veclist(nv,2,:)
- vecdims = veclist(nv,3,:)
- ;print(nv+" "+vecnames+" "+vecgrids+" "+vecmeth)
-
- ; create a vector pair list
- vecpairs = NewList("fifo")
- n = 0
- uvel = ocnf[ii]->$vecnames(n)$
- vecfld = where(ismissing(uvel),0.0,uvel)
- copy_VarAtts(uvel,vecfld)
- ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
- wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
- ut = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- delete(ut@remap)
-
- n = 1
- vvel = ocnf[ii]->$vecnames(n)$
- vecfld = where(ismissing(vvel),0.0,vvel)
- copy_VarAtts(vvel,vecfld)
- ;print("unstagger "+vecnames(n)+" from "+vecgrids(n)+" to Ct")
- wgtsfile = nemsrc+"/"+"tripole.mx025."+vecgrids(n)+".to.Ct.bilinear.nc"
- vt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- delete(vt@remap)
-
- ListAppend(vecpairs,ut)
- ListAppend(vecpairs,vt)
- ;print(vecpairs)
-
- ; rotate
- ; first copy Metadata
- urot = vecpairs[0]
- vrot = vecpairs[1]
- if(vecdims(0) .eq. "2")then
- urot = ut*cosrot2d + vt*sinrot2d
- vrot = vt*cosrot2d - ut*sinrot2d
- else
- urot = ut*cosrot3d + vt*sinrot3d
- vrot = vt*cosrot3d - ut*sinrot3d
- end if
- ; change attribute to indicate these are now rotated velocities
- urot@long_name=str_sub_str(urot@long_name,"X","Zonal")
- vrot@long_name=str_sub_str(vrot@long_name,"Y","Meridional")
- ; copy back
- vecpairs[0] = urot
- vecpairs[1] = vrot
- delete([/urot, vrot/])
-
- ; remap
- do n = 0,1
- vecfld = vecpairs[n]
- ; regrid to dsttype+dstgrd with method
- ;print("remapping "+vecnames(n)+" to grid "+dsttype+dstgrds(jj))
- wgtsfile = nemsrc+"/"+"tripole.mx025.Ct.to."+dsttype+dstgrds(jj)+"."+vecmeth(n)+".nc"
-
- rgtt = ESMF_regrid_with_weights(vecfld,wgtsfile,False)
- if(vecdims(n) .eq. "2")then
- rgtt = where(ismissing(rgmask2d),vecfld@_FillValue,rgtt)
- rgtt=rgtt(:,::-1,:)
- else
- rgtt = where(ismissing(rgmask3d),vecfld@_FillValue,rgtt)
- rgtt=rgtt(:,:,::-1,:)
- end if
-
- ; enter file definition mode to add variable attributes
- setfileoption(outcdf,"DefineMode",True)
- filevarattdef(outcdf, vecnames(n), rgtt)
- setfileoption(outcdf,"DefineMode",False)
-
- outcdf->$vecnames(n)$ = (/rgtt/)
- delete(rgtt)
- end do
- delete([/uvel,vvel,ut,vt,vecfld,vecpairs/])
- delete([/vecnames,vecgrids,vecmeth,vecdims/])
- ; nv, loop over number of vector pairs
- end do
-
-;----------------------------------------------------------------------
-; close the outcdf and continue through filelist
-;----------------------------------------------------------------------
-
- delete(outcdf)
- ; rename mld to ePBL if required
- do nv = 0,nvars-1
- varname = varlist(nv,0)
- ; if(varname .eq. "mld" .and. valid(nv))then
- if(varname .eq. "MLD_003" .and. valid(nv))then
- print("Renaming MLD_003 to mld")
- ;print(ncocmd+" "+outfile)
- system(ncocmd+" "+outfile)
- end if
- end do
-
- ; ii, loop over files
-;
- ;jj, loop over destination grids
- delete([/lat,lon,nlon,nlat/])
- delete([/rgmask2d,rgmask3d/])
- end do
- print("One complete ocn file in " + (get_cpu_time() - begTime) + " seconds")
-exit
-end
diff --git a/ush/parsing_ufs_configure.sh b/ush/parsing_ufs_configure.sh
index 2071586905..bec5c8f0f6 100755
--- a/ush/parsing_ufs_configure.sh
+++ b/ush/parsing_ufs_configure.sh
@@ -1,20 +1,15 @@
#! /usr/bin/env bash
#####
-## This script writes ufs.configure file
-## first, select a "*.IN" templates based on
-## $confignamevarforufs and parse values based on
-## $cpl** switches.
-##
-## This is a child script of modular
-## forecast script. This script is definition only (Is it? There is nothing defined here being used outside this script.)
+## This script writes the ufs.configure file based on a template defined in
+## ${ufs_configure_template}
#####
# Disable variable not used warnings
# shellcheck disable=SC2034
writing_ufs_configure() {
-echo "SUB ${FUNCNAME[0]}: ufs.configure.sh begins"
+echo "SUB ${FUNCNAME[0]}: ufs.configure begins"
# Setup ufs.configure
local esmf_logkind=${esmf_logkind:-"ESMF_LOGKIND_MULTI"} #options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE
@@ -24,14 +19,13 @@ local cap_dbug_flag=${cap_dbug_flag:-0}
# Determine "cmeps_run_type" based on the availability of the mediator restart file
# If it is a warm_start, we already copied the mediator restart to DATA, if it was present
# If the mediator restart was not present, despite being a "warm_start", we put out a WARNING
-# in forecast_postdet.sh
+# in forecast_postdet.sh function CMEPS_postdet
if [[ -f "${DATA}/ufs.cpld.cpl.r.nc" ]]; then
local cmeps_run_type='continue'
else
local cmeps_run_type='startup'
fi
-
# Atm-related
local atm_model="fv3"
local atm_petlist_bounds="0 $(( ATMPETS-1 ))"
@@ -54,7 +48,7 @@ if [[ "${cplflx}" = ".true." ]]; then
local ocn_petlist_bounds="${ATMPETS} $(( ATMPETS+OCNPETS-1 ))"
local ocn_omp_num_threads="${OCNTHREADS}"
local RUNTYPE="${cmeps_run_type}"
- local CMEPS_RESTART_DIR="RESTART/"
+ local CMEPS_RESTART_DIR="CMEPS_RESTART/"
local CPLMODE="${cplmode}"
local coupling_interval_fast_sec="${CPL_FAST}"
local RESTART_N="${restart_interval}"
@@ -95,6 +89,8 @@ fi
if [[ ! -r "${ufs_configure_template}" ]]; then
echo "FATAL ERROR: template '${ufs_configure_template}' does not exist, ABORT!"
exit 1
+else
+ echo "INFO: using ufs.configure template: '${ufs_configure_template}'"
fi
source "${HOMEgfs}/ush/atparse.bash"
@@ -105,6 +101,6 @@ cat ufs.configure
${NCP} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/fd_ufs.yaml" fd_ufs.yaml
-echo "SUB ${FUNCNAME[0]}: ufs.configure.sh ends for ${ufs_configure_template}"
+echo "SUB ${FUNCNAME[0]}: ufs.configure ends"
}
diff --git a/ush/python/pygfs/task/oceanice_products.py b/ush/python/pygfs/task/oceanice_products.py
new file mode 100644
index 0000000000..968acb0750
--- /dev/null
+++ b/ush/python/pygfs/task/oceanice_products.py
@@ -0,0 +1,337 @@
+#!/usr/bin/env python3
+
+import os
+from logging import getLogger
+from typing import List, Dict, Any
+from pprint import pformat
+import xarray as xr
+
+from wxflow import (AttrDict,
+ parse_j2yaml,
+ FileHandler,
+ Jinja,
+ logit,
+ Task,
+ add_to_datetime, to_timedelta,
+ WorkflowException,
+ Executable)
+
+logger = getLogger(__name__.split('.')[-1])
+
+
+class OceanIceProducts(Task):
+ """Ocean Ice Products Task
+ """
+
+ VALID_COMPONENTS = ['ocean', 'ice']
+ COMPONENT_RES_MAP = {'ocean': 'OCNRES', 'ice': 'ICERES'}
+ VALID_PRODUCT_GRIDS = {'mx025': ['1p00', '0p25'],
+ 'mx050': ['1p00', '0p50'],
+ 'mx100': ['1p00'],
+ 'mx500': ['5p00']}
+
+ # These could be read from the yaml file
+ TRIPOLE_DIMS_MAP = {'mx025': [1440, 1080], 'mx050': [720, 526], 'mx100': [360, 320], 'mx500': [72, 35]}
+ LATLON_DIMS_MAP = {'0p25': [1440, 721], '0p50': [720, 361], '1p00': [360, 181], '5p00': [72, 36]}
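+    # e.g. 'mx025' (a 1440x1080 tripole) supports the '1p00' (360x181) and '0p25' (1440x721) products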
+
+ @logit(logger, name="OceanIceProducts")
+ def __init__(self, config: Dict[str, Any]) -> None:
+        """Constructor for the Ocean/Ice Products task
+
+ Parameters
+ ----------
+ config : Dict[str, Any]
+ Incoming configuration for the task from the environment
+
+ Returns
+ -------
+ None
+ """
+ super().__init__(config)
+
+ if self.config.COMPONENT not in self.VALID_COMPONENTS:
+ raise NotImplementedError(f'{self.config.COMPONENT} is not a valid model component.\n' +
+ 'Valid model components are:\n' +
+ f'{", ".join(self.VALID_COMPONENTS)}')
+
+ model_grid = f"mx{self.config[self.COMPONENT_RES_MAP[self.config.COMPONENT]]:03d}"
+
+ valid_datetime = add_to_datetime(self.runtime_config.current_cycle, to_timedelta(f"{self.config.FORECAST_HOUR}H"))
+
+ # TODO: This is a bit of a hack, but it works for now
+ # FIXME: find a better way to provide the averaging period
+ # This will be different for ocean and ice, so when they are made flexible, this will need to be addressed
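+        # e.g. FORECAST_HOUR=12 with FHOUT_GFS=6 yields avg_period "006-012"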
+ avg_period = f"{self.config.FORECAST_HOUR-self.config.FHOUT_GFS:03d}-{self.config.FORECAST_HOUR:03d}"
+
+ localdict = AttrDict(
+ {'component': self.config.COMPONENT,
+ 'forecast_hour': self.config.FORECAST_HOUR,
+ 'valid_datetime': valid_datetime,
+ 'avg_period': avg_period,
+ 'model_grid': model_grid,
+ 'product_grids': self.VALID_PRODUCT_GRIDS[model_grid]}
+ )
+ self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict)
+
+ # Read the oceanice_products.yaml file for common configuration
+ logger.info(f"Read the ocean ice products configuration yaml file {self.config.OCEANICEPRODUCTS_CONFIG}")
+ self.task_config.oceanice_yaml = parse_j2yaml(self.config.OCEANICEPRODUCTS_CONFIG, self.task_config)
+ logger.debug(f"oceanice_yaml:\n{pformat(self.task_config.oceanice_yaml)}")
+
+ @staticmethod
+ @logit(logger)
+ def initialize(config: Dict) -> None:
+ """Initialize the work directory by copying all the common fix data
+
+ Parameters
+ ----------
+ config : Dict
+ Configuration dictionary for the task
+
+ Returns
+ -------
+ None
+ """
+
+ # Copy static data to run directory
+ logger.info("Copy static data to run directory")
+ FileHandler(config.oceanice_yaml.ocnicepost.fix_data).sync()
+
+ # Copy "component" specific model data to run directory (e.g. ocean/ice forecast output)
+ logger.info(f"Copy {config.component} data to run directory")
+ FileHandler(config.oceanice_yaml[config.component].data_in).sync()
+
+ @staticmethod
+ @logit(logger)
+ def configure(config: Dict, product_grid: str) -> None:
+ """Configure the namelist for the product_grid in the work directory.
+ Create namelist 'ocnicepost.nml' from template
+
+ Parameters
+ ----------
+ config : Dict
+ Configuration dictionary for the task
+ product_grid : str
+ Target product grid to process
+
+ Returns
+ -------
+ None
+ """
+
+ # Make a localconf with the "component" specific configuration for parsing the namelist
+ localconf = AttrDict()
+ localconf.DATA = config.DATA
+ localconf.component = config.component
+
+ localconf.source_tripole_dims = ', '.join(map(str, OceanIceProducts.TRIPOLE_DIMS_MAP[config.model_grid]))
+ localconf.target_latlon_dims = ', '.join(map(str, OceanIceProducts.LATLON_DIMS_MAP[product_grid]))
+
+ localconf.maskvar = config.oceanice_yaml[config.component].namelist.maskvar
+ localconf.sinvar = config.oceanice_yaml[config.component].namelist.sinvar
+ localconf.cosvar = config.oceanice_yaml[config.component].namelist.cosvar
+ localconf.angvar = config.oceanice_yaml[config.component].namelist.angvar
+ localconf.debug = ".true." if config.oceanice_yaml.ocnicepost.namelist.debug else ".false."
+
+ logger.debug(f"localconf:\n{pformat(localconf)}")
+
+ # Configure the namelist and write to file
+ logger.info("Create namelist for ocnicepost.x")
+ nml_template = os.path.join(localconf.DATA, "ocnicepost.nml.jinja2")
+ nml_data = Jinja(nml_template, localconf).render
+ logger.debug(f"ocnicepost_nml:\n{nml_data}")
+ nml_file = os.path.join(localconf.DATA, "ocnicepost.nml")
+ with open(nml_file, "w") as fho:
+ fho.write(nml_data)
+
+ @staticmethod
+ @logit(logger)
+ def execute(config: Dict, product_grid: str) -> None:
+ """Run the ocnicepost.x executable to interpolate and convert to grib2
+
+ Parameters
+ ----------
+ config : Dict
+ Configuration dictionary for the task
+ product_grid : str
+ Target product grid to process
+
+ Returns
+ -------
+ None
+ """
+
+ # Run the ocnicepost.x executable
+ OceanIceProducts.interp(config.DATA, config.APRUN_OCNICEPOST, exec_name="ocnicepost.x")
+
+ # Convert interpolated netCDF file to grib2
+ OceanIceProducts.netCDF_to_grib2(config, product_grid)
+
+ @staticmethod
+ @logit(logger)
+ def interp(workdir: str, aprun_cmd: str, exec_name: str = "ocnicepost.x") -> None:
+ """
+ Run the interpolation executable to generate rectilinear netCDF file
+
+ Parameters
+ Configuration dictionary for the task
+ workdir : str
+ Working directory for the task
+ aprun_cmd : str
+ aprun command to use
+ exec_name : str
+ Name of the executable e.g. ocnicepost.x
+
+ Returns
+ -------
+ None
+ """
+ os.chdir(workdir)
+ logger.debug(f"Current working directory: {os.getcwd()}")
+
+ exec_cmd = Executable(aprun_cmd)
+ exec_cmd.add_default_arg(os.path.join(workdir, exec_name))
+
+ OceanIceProducts._call_executable(exec_cmd)
+
+ @staticmethod
+ @logit(logger)
+ def netCDF_to_grib2(config: Dict, grid: str) -> None:
+ """Convert interpolated netCDF file to grib2
+
+ Parameters
+ ----------
+ config : Dict
+ Configuration dictionary for the task
+ grid : str
+ Target product grid to process
+
+ Returns
+        -------
+ None
+ """
+
+ os.chdir(config.DATA)
+
+ exec_cmd = Executable(config.oceanice_yaml.nc2grib2.script)
+ arguments = [config.component, grid, config.current_cycle.strftime("%Y%m%d%H"), config.avg_period]
+ if config.component == 'ocean':
+ levs = config.oceanice_yaml.ocean.namelist.ocean_levels
+ arguments.append(':'.join(map(str, levs)))
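+        # e.g.: <nc2grib2 script> ocean 0p25 YYYYMMDDHH 006-012 z1:z2:... (the colon-joined level list is ocean-only)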
+
+ logger.info(f"Executing {exec_cmd} with arguments {arguments}")
+ try:
+ exec_cmd(*arguments)
+ except OSError:
+ logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}")
+ raise OSError(f"{exec_cmd}")
+ except Exception:
+ logger.exception(f"FATAL ERROR: Error occurred during execution of {exec_cmd}")
+ raise WorkflowException(f"{exec_cmd}")
+
+ @staticmethod
+ @logit(logger)
+ def subset(config: Dict) -> None:
+ """
+ Subset a list of variables from a netcdf file and save to a new netcdf file.
+ Also save global attributes and history from the old netcdf file into new netcdf file
+
+ Parameters
+ ----------
+ config : Dict
+ Configuration dictionary for the task
+
+ Returns
+ -------
+ None
+ """
+
+ os.chdir(config.DATA)
+
+ input_file = f"{config.component}.nc"
+ output_file = f"{config.component}_subset.nc"
+ varlist = config.oceanice_yaml[config.component].subset
+
+ logger.info(f"Subsetting {varlist} from {input_file} to {output_file}")
+
+ try:
+ # open the netcdf file
+ ds = xr.open_dataset(input_file)
+
+ # subset the variables
+ ds_subset = ds[varlist]
+
+ # save global attributes from the old netcdf file into new netcdf file
+ ds_subset.attrs = ds.attrs
+
+ # save subsetted variables to a new netcdf file
+ ds_subset.to_netcdf(output_file)
+
+ except FileNotFoundError:
+ logger.exception(f"FATAL ERROR: Input file not found: {input_file}")
+ raise FileNotFoundError(f"File not found: {input_file}")
+
+ except IOError as err:
+ logger.exception(f"FATAL ERROR: IOError occurred during netCDF subset: {input_file}")
+ raise IOError(f"An I/O error occurred: {err}")
+
+ except Exception as err:
+ logger.exception(f"FATAL ERROR: Error occurred during netCDF subset: {input_file}")
+ raise WorkflowException(f"{err}")
+
+        finally:
+            # close the netcdf files; guard against a failed open, which
+            # would leave these names unbound and raise NameError here
+            if 'ds' in locals():
+                ds.close()
+            if 'ds_subset' in locals():
+                ds_subset.close()
+
+ @staticmethod
+ @logit(logger)
+ def _call_executable(exec_cmd: Executable) -> None:
+ """Internal method to call executable
+
+ Parameters
+ ----------
+ exec_cmd : Executable
+ Executable to run
+
+ Raises
+ ------
+ OSError
+ Failure due to OS issues
+ WorkflowException
+ All other exceptions
+ """
+
+ logger.info(f"Executing {exec_cmd}")
+ try:
+ exec_cmd()
+ except OSError:
+ logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}")
+ raise OSError(f"{exec_cmd}")
+ except Exception:
+ logger.exception(f"FATAL ERROR: Error occurred during execution of {exec_cmd}")
+ raise WorkflowException(f"{exec_cmd}")
+
+ @staticmethod
+ @logit(logger)
+ def finalize(config: Dict) -> None:
+ """Perform closing actions of the task.
+ Copy data back from the DATA/ directory to COM/
+
+ Parameters
+ ----------
+ config: Dict
+ Configuration dictionary for the task
+
+ Returns
+ -------
+ None
+ """
+
+ # Copy "component" specific generated data to COM/ directory
+ data_out = config.oceanice_yaml[config.component].data_out
+
+        logger.info("Copy processed data to COM/ directory")
+ FileHandler(data_out).sync()
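For orientation, a hypothetical driver for the class above, showing the call sequence its API implies. The rocoto job and ex-script are not part of this diff, so the ordering below and the use of cast_strdict_as_dtypedict (wxflow's usual helper for building a typed config from the environment) are assumptions, not the delivered implementation:

    #!/usr/bin/env python3
    import os

    from wxflow import cast_strdict_as_dtypedict
    from pygfs.task.oceanice_products import OceanIceProducts

    # build a typed configuration from the job environment (assumed pattern)
    config = cast_strdict_as_dtypedict(os.environ)
    oceanice = OceanIceProducts(config)

    # stage fix data and component history files into the run directory
    OceanIceProducts.initialize(oceanice.task_config)

    # interpolate and convert to grib2 once per destination grid
    for grid in oceanice.task_config.product_grids:
        OceanIceProducts.configure(oceanice.task_config, grid)
        OceanIceProducts.execute(oceanice.task_config, grid)

    # subset the native netCDF output and deliver products to COM/
    OceanIceProducts.subset(oceanice.task_config)
    OceanIceProducts.finalize(oceanice.task_config)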
diff --git a/ush/syndat_getjtbul.sh b/ush/syndat_getjtbul.sh
index c17067ff72..a68187868a 100755
--- a/ush/syndat_getjtbul.sh
+++ b/ush/syndat_getjtbul.sh
@@ -52,8 +52,6 @@ hour=$(echo $CDATE10 | cut -c9-10)
echo $PDYm1
pdym1=$PDYm1
-#pdym1=$(sh $utilscript/finddate.sh $pdy d-1)
-
echo " " >> $pgmout
echo "Entering sub-shell syndat_getjtbul.sh to recover JTWC Bulletins" \
>> $pgmout
diff --git a/versions/run.spack.ver b/versions/run.spack.ver
index 5b245ef873..ef2dff0b80 100644
--- a/versions/run.spack.ver
+++ b/versions/run.spack.ver
@@ -25,6 +25,7 @@ export py_python_dateutil_ver=2.8.2
export met_ver=9.1.3
export metplus_ver=3.1.1
+export py_xarray_ver=2022.3.0
export obsproc_run_ver=1.1.2
export prepobs_run_ver=1.0.1
diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py
index 9e8bb5c67e..0be4dc7124 100644
--- a/workflow/applications/gefs.py
+++ b/workflow/applications/gefs.py
@@ -20,7 +20,12 @@ def _get_app_configs(self):
configs += ['efcs']
if self.do_wave:
- configs += ['waveinit']
+ configs += ['waveinit', 'wavepostsbs', 'wavepostpnt']
+ if self.do_wave_bnd:
+ configs += ['wavepostbndpnt', 'wavepostbndpntbll']
+
+ if self.do_ocean or self.do_ice:
+ configs += ['oceanice_products']
return configs
@@ -45,6 +50,18 @@ def get_task_names(self):
if self.nens > 0:
tasks += ['efcs']
- tasks += ['atmprod']
+ tasks += ['atmos_prod']
+
+ if self.do_ocean:
+ tasks += ['ocean_prod']
+
+ if self.do_ice:
+ tasks += ['ice_prod']
+
+ if self.do_wave:
+ tasks += ['wavepostsbs']
+ if self.do_wave_bnd:
+ tasks += ['wavepostbndpnt', 'wavepostbndpntbll']
+ tasks += ['wavepostpnt']
return {f"{self._base['CDUMP']}": tasks}
diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py
index 6dd0342a78..040fc090cb 100644
--- a/workflow/applications/gfs_cycled.py
+++ b/workflow/applications/gfs_cycled.py
@@ -48,8 +48,8 @@ def _get_app_configs(self):
if self.do_vrfy_oceanda:
configs += ['ocnanalvrfy']
- if self.do_ocean:
- configs += ['ocnpost']
+ if self.do_ocean or self.do_ice:
+ configs += ['oceanice_products']
configs += ['sfcanl', 'analcalc', 'fcst', 'upp', 'atmos_products', 'arch', 'cleanup']
@@ -178,7 +178,7 @@ def get_task_names(self):
if self.do_upp:
gdas_tasks += ['atmupp']
- gdas_tasks += ['atmprod']
+ gdas_tasks += ['atmos_prod']
if self.do_wave and 'gdas' in self.wave_cdumps:
if self.do_wave_bnd:
@@ -210,9 +210,15 @@ def get_task_names(self):
gfs_tasks += ['atmanlupp', 'atmanlprod', 'fcst']
+ if self.do_ocean:
+ gfs_tasks += ['ocean_prod']
+
+ if self.do_ice:
+ gfs_tasks += ['ice_prod']
+
if self.do_upp:
gfs_tasks += ['atmupp']
- gfs_tasks += ['atmprod']
+ gfs_tasks += ['atmos_prod']
if self.do_goes:
gfs_tasks += ['goesupp']
diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py
index 1145863210..0a9648ee65 100644
--- a/workflow/applications/gfs_forecast_only.py
+++ b/workflow/applications/gfs_forecast_only.py
@@ -49,7 +49,7 @@ def _get_app_configs(self):
configs += ['awips']
if self.do_ocean or self.do_ice:
- configs += ['ocnpost']
+ configs += ['oceanice_products']
if self.do_wave:
configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt']
@@ -100,7 +100,7 @@ def get_task_names(self):
if self.do_upp:
tasks += ['atmupp']
- tasks += ['atmprod']
+ tasks += ['atmos_prod']
if self.do_goes:
tasks += ['goesupp']
@@ -126,8 +126,11 @@ def get_task_names(self):
if self.do_awips:
tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind']
- if self.do_ocean or self.do_ice:
- tasks += ['ocnpost']
+ if self.do_ocean:
+ tasks += ['ocean_prod']
+
+ if self.do_ice:
+ tasks += ['ice_prod']
if self.do_wave:
if self.do_wave_bnd:
diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py
index a72753eb90..50b24f3578 100644
--- a/workflow/rocoto/gefs_tasks.py
+++ b/workflow/rocoto/gefs_tasks.py
@@ -75,7 +75,7 @@ def stage_ic(self):
def waveinit(self):
resources = self.get_resource('waveinit')
- task_name = f'waveinit'
+        task_name = 'wave_init'
task_dict = {'task_name': task_name,
'resources': resources,
'envars': self.envars,
@@ -90,14 +90,12 @@ def waveinit(self):
return task
def fcst(self):
-
- # TODO: Add real dependencies
dependencies = []
dep_dict = {'type': 'task', 'name': f'stage_ic'}
dependencies.append(rocoto.add_dependency(dep_dict))
if self.app_config.do_wave:
- dep_dict = {'type': 'task', 'name': f'waveinit'}
+            dep_dict = {'type': 'task', 'name': 'wave_init'}
dependencies.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
@@ -124,7 +122,7 @@ def efcs(self):
dependencies.append(rocoto.add_dependency(dep_dict))
if self.app_config.do_wave:
- dep_dict = {'type': 'task', 'name': f'waveinit'}
+            dep_dict = {'type': 'task', 'name': 'wave_init'}
dependencies.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
@@ -150,7 +148,7 @@ def efcs(self):
'maxtries': '&MAXTRIES;'
}
- member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(1, self.nmem + 1)])}
+ member_var_dict = {'member': ' '.join([f"{mem:03d}" for mem in range(1, self.nmem + 1)])}
metatask_dict = {'task_name': 'fcst_ens',
'var_dict': member_var_dict,
'task_dict': task_dict
@@ -160,46 +158,231 @@ def efcs(self):
return task
- def atmprod(self):
- atm_master_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_MASTER_TMPL"], {'MEMDIR': 'mem#member#'})
+ def atmos_prod(self):
+ return self._atmosoceaniceprod('atmos')
+
+ def ocean_prod(self):
+ return self._atmosoceaniceprod('ocean')
+
+ def ice_prod(self):
+ return self._atmosoceaniceprod('ice')
+
+ def _atmosoceaniceprod(self, component: str):
+
+ products_dict = {'atmos': {'config': 'atmos_products',
+ 'history_path_tmpl': 'COM_ATMOS_MASTER_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.t@Hz.master.grb2f#fhr#'},
+ 'ocean': {'config': 'oceanice_products',
+ 'history_path_tmpl': 'COM_OCEAN_HISTORY_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.ocean.t@Hz.6hr_avg.f#fhr#.nc'},
+ 'ice': {'config': 'oceanice_products',
+ 'history_path_tmpl': 'COM_ICE_HISTORY_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.ice.t@Hz.6hr_avg.f#fhr#.nc'}}
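+        # '@H' is a Rocoto cycle-string token; '#fhr#' and '#member#' are metatask variables expanded by Rocoto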
+
+ component_dict = products_dict[component]
+ config = component_dict['config']
+ history_path_tmpl = component_dict['history_path_tmpl']
+ history_file_tmpl = component_dict['history_file_tmpl']
+
+ resources = self.get_resource(config)
+
+ history_path = self._template_to_rocoto_cycstring(self._base[history_path_tmpl], {'MEMDIR': 'mem#member#'})
deps = []
- data = f'{atm_master_path}/{self.cdump}.t@Hz.master.grb2f#fhr#'
+ data = f'{history_path}/{history_file_tmpl}'
dep_dict = {'type': 'data', 'data': data, 'age': 120}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
- atm_prod_envars = self.envars.copy()
+ postenvars = self.envars.copy()
postenvar_dict = {'ENSMEM': '#member#',
'MEMDIR': 'mem#member#',
'FHRLST': '#fhr#',
+ 'COMPONENT': component}
+ for key, value in postenvar_dict.items():
+ postenvars.append(rocoto.create_envar(name=key, value=str(value)))
+
+ task_name = f'{component}_prod_mem#member#_f#fhr#'
+ task_dict = {'task_name': task_name,
+ 'resources': resources,
+ 'dependency': dependencies,
+ 'envars': postenvars,
+ 'cycledef': 'gefs',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/{config}.sh',
+ 'job_name': f'{self.pslot}_{task_name}_@H',
+ 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+ 'maxtries': '&MAXTRIES;'}
+
+ fhrs = self._get_forecast_hours('gefs', self._configs[config])
+
+ # ocean/ice components do not have fhr 0 as they are averaged output
+ if component in ['ocean', 'ice']:
+ fhrs.remove(0)
+
+ fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])}
+
+ fhr_metatask_dict = {'task_name': f'{component}_prod_#member#',
+ 'task_dict': task_dict,
+ 'var_dict': fhr_var_dict}
+
+ member_var_dict = {'member': ' '.join([f"{mem:03d}" for mem in range(0, self.nmem + 1)])}
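+        # members run 000..nmem; in GEFS, member 000 is the unperturbed control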
+ member_metatask_dict = {'task_name': f'{component}_prod',
+ 'task_dict': fhr_metatask_dict,
+ 'var_dict': member_var_dict}
+
+ task = rocoto.create_task(member_metatask_dict)
+
+ return task
+
+ def wavepostsbs(self):
+ deps = []
+ for wave_grid in self._configs['wavepostsbs']['waveGRD'].split():
+ wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"], {'MEMDIR': 'mem#member#'})
+ data = f'{wave_hist_path}/gefswave.out_grd.{wave_grid}.@Y@m@d.@H0000'
+ dep_dict = {'type': 'data', 'data': data}
+ deps.append(rocoto.add_dependency(dep_dict))
+ dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+ wave_post_envars = self.envars.copy()
+ postenvar_dict = {'ENSMEM': '#member#',
+ 'MEMDIR': 'mem#member#',
}
for key, value in postenvar_dict.items():
- atm_prod_envars.append(rocoto.create_envar(name=key, value=str(value)))
+ wave_post_envars.append(rocoto.create_envar(name=key, value=str(value)))
- resources = self.get_resource('atmos_products')
+ resources = self.get_resource('wavepostsbs')
- task_name = f'atm_prod_mem#member#_f#fhr#'
+ task_name = f'wave_post_grid_mem#member#'
task_dict = {'task_name': task_name,
'resources': resources,
'dependency': dependencies,
- 'envars': atm_prod_envars,
+ 'envars': wave_post_envars,
'cycledef': 'gefs',
- 'command': f'{self.HOMEgfs}/jobs/rocoto/atmos_products.sh',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostsbs.sh',
'job_name': f'{self.pslot}_{task_name}_@H',
'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
'maxtries': '&MAXTRIES;'
}
- fhr_var_dict = {'fhr': ' '.join([str(fhr).zfill(3) for fhr in
- self._get_forecast_hours('gefs', self._configs['atmos_products'])])}
- fhr_metatask_dict = {'task_name': 'atm_prod_#member#',
- 'task_dict': task_dict,
- 'var_dict': fhr_var_dict
- }
+ member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
+ member_metatask_dict = {'task_name': 'wave_post_grid',
+ 'task_dict': task_dict,
+ 'var_dict': member_var_dict
+ }
+
+ task = rocoto.create_task(member_metatask_dict)
+
+ return task
+
+ def wavepostbndpnt(self):
+ deps = []
+ dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+ deps.append(rocoto.add_dependency(dep_dict))
+ dependencies = rocoto.create_dependency(dep=deps)
+
+ wave_post_bndpnt_envars = self.envars.copy()
+ postenvar_dict = {'ENSMEM': '#member#',
+ 'MEMDIR': 'mem#member#',
+ }
+ for key, value in postenvar_dict.items():
+ wave_post_bndpnt_envars.append(rocoto.create_envar(name=key, value=str(value)))
+
+ resources = self.get_resource('wavepostbndpnt')
+ task_name = f'wave_post_bndpnt_mem#member#'
+ task_dict = {'task_name': task_name,
+ 'resources': resources,
+ 'dependency': dependencies,
+ 'envars': wave_post_bndpnt_envars,
+ 'cycledef': 'gefs',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostbndpnt.sh',
+ 'job_name': f'{self.pslot}_{task_name}_@H',
+ 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+ 'maxtries': '&MAXTRIES;'
+ }
member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
- member_metatask_dict = {'task_name': 'atm_prod',
- 'task_dict': fhr_metatask_dict,
+ member_metatask_dict = {'task_name': 'wave_post_bndpnt',
+ 'task_dict': task_dict,
+ 'var_dict': member_var_dict
+ }
+
+ task = rocoto.create_task(member_metatask_dict)
+
+ return task
+
+ def wavepostbndpntbll(self):
+ deps = []
+ atmos_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"], {'MEMDIR': 'mem#member#'})
+ # Is there any reason this is 180?
+ data = f'{atmos_hist_path}/{self.cdump}.t@Hz.atm.logf180.txt'
+ dep_dict = {'type': 'data', 'data': data}
+ deps.append(rocoto.add_dependency(dep_dict))
+
+ dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+ deps.append(rocoto.add_dependency(dep_dict))
+ dependencies = rocoto.create_dependency(dep_condition='or', dep=deps)
+
+ wave_post_bndpnt_bull_envars = self.envars.copy()
+ postenvar_dict = {'ENSMEM': '#member#',
+ 'MEMDIR': 'mem#member#',
+ }
+ for key, value in postenvar_dict.items():
+ wave_post_bndpnt_bull_envars.append(rocoto.create_envar(name=key, value=str(value)))
+
+ resources = self.get_resource('wavepostbndpntbll')
+ task_name = f'wave_post_bndpnt_bull_mem#member#'
+ task_dict = {'task_name': task_name,
+ 'resources': resources,
+ 'dependency': dependencies,
+ 'envars': wave_post_bndpnt_bull_envars,
+ 'cycledef': 'gefs',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostbndpntbll.sh',
+ 'job_name': f'{self.pslot}_{task_name}_@H',
+ 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+ 'maxtries': '&MAXTRIES;'
+ }
+
+ member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
+ member_metatask_dict = {'task_name': 'wave_post_bndpnt_bull',
+ 'task_dict': task_dict,
+ 'var_dict': member_var_dict
+ }
+
+ task = rocoto.create_task(member_metatask_dict)
+
+ return task
+
+ def wavepostpnt(self):
+ deps = []
+ dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+ deps.append(rocoto.add_dependency(dep_dict))
+ if self.app_config.do_wave_bnd:
+ dep_dict = {'type': 'task', 'name': f'wave_post_bndpnt_bull_mem#member#'}
+ deps.append(rocoto.add_dependency(dep_dict))
+ dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+
+ wave_post_pnt_envars = self.envars.copy()
+ postenvar_dict = {'ENSMEM': '#member#',
+ 'MEMDIR': 'mem#member#',
+ }
+ for key, value in postenvar_dict.items():
+ wave_post_pnt_envars.append(rocoto.create_envar(name=key, value=str(value)))
+
+ resources = self.get_resource('wavepostpnt')
+ task_name = f'wave_post_pnt_mem#member#'
+ task_dict = {'task_name': task_name,
+ 'resources': resources,
+ 'dependency': dependencies,
+ 'envars': wave_post_pnt_envars,
+ 'cycledef': 'gefs',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/wavepostpnt.sh',
+ 'job_name': f'{self.pslot}_{task_name}_@H',
+ 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+ 'maxtries': '&MAXTRIES;'
+ }
+
+ member_var_dict = {'member': ' '.join([str(mem).zfill(3) for mem in range(0, self.nmem + 1)])}
+ member_metatask_dict = {'task_name': 'wave_post_pnt',
+ 'task_dict': task_dict,
'var_dict': member_var_dict
}
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index 9102c74e35..83623f42d2 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -99,7 +99,7 @@ def prep(self):
gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False
deps = []
- dep_dict = {'type': 'metatask', 'name': 'gdasatmprod', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
+ dep_dict = {'type': 'metatask', 'name': 'gdasatmos_prod', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
deps.append(rocoto.add_dependency(dep_dict))
data = f'{atm_hist_path}/gdas.t@Hz.atmf009.nc'
dep_dict = {'type': 'data', 'data': data, 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
@@ -583,7 +583,7 @@ def prepoceanobs(self):
ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'})
deps = []
- data = f'{ocean_hist_path}/gdas.t@Hz.ocnf009.nc'
+ data = f'{ocean_hist_path}/gdas.ocean.t@Hz.inst.f009.nc'
dep_dict = {'type': 'data', 'data': data, 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -927,9 +927,21 @@ def atmanlprod(self):
return task
@staticmethod
- def _get_ufs_postproc_grps(cdump, config):
+ def _get_ufs_postproc_grps(cdump, config, component='atmos'):
- fhrs = Tasks._get_forecast_hours(cdump, config)
+ # Make a local copy of the config to avoid modifying the original
+ local_config = config.copy()
+
+ # Ocean/Ice components do not have a HF output option like the atmosphere
+ if component in ['ocean', 'ice']:
+ local_config['FHMAX_HF_GFS'] = config['FHMAX_GFS']
+ local_config['FHOUT_HF_GFS'] = config['FHOUT_GFS']
+
+ fhrs = Tasks._get_forecast_hours(cdump, local_config)
+
+ # ocean/ice components do not have fhr 0 as they are averaged output
+ if component in ['ocean', 'ice']:
+ fhrs.remove(0)
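+            # e.g. forecast hours [0, 6, 12] become [6, 12] for the averaged ocean/ice output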
nfhrs_per_grp = config.get('NFHRS_PER_GROUP', 1)
ngrps = len(fhrs) // nfhrs_per_grp if len(fhrs) % nfhrs_per_grp == 0 else len(fhrs) // nfhrs_per_grp + 1
@@ -1002,83 +1014,63 @@ def _upptask(self, upp_run="forecast", task_id="atmupp"):
return task
- def atmprod(self):
+ def atmos_prod(self):
+ return self._atmosoceaniceprod('atmos')
- varname1, varname2, varname3 = 'grp', 'dep', 'lst'
- varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs['atmos_products'])
- var_dict = {varname1: varval1, varname2: varval2, varname3: varval3}
+ def ocean_prod(self):
+ return self._atmosoceaniceprod('ocean')
- postenvars = self.envars.copy()
- postenvar_dict = {'FHRLST': '#lst#'}
- for key, value in postenvar_dict.items():
- postenvars.append(rocoto.create_envar(name=key, value=str(value)))
+ def ice_prod(self):
+ return self._atmosoceaniceprod('ice')
- atm_master_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_MASTER_TMPL"])
- deps = []
- data = f'{atm_master_path}/{self.cdump}.t@Hz.master.grb2#dep#'
- dep_dict = {'type': 'data', 'data': data, 'age': 120}
- deps.append(rocoto.add_dependency(dep_dict))
- dependencies = rocoto.create_dependency(dep=deps)
- cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump
- resources = self.get_resource('atmos_products')
+ def _atmosoceaniceprod(self, component: str):
- task_name = f'{self.cdump}atmprod#{varname1}#'
- task_dict = {'task_name': task_name,
- 'resources': resources,
- 'dependency': dependencies,
- 'envars': postenvars,
- 'cycledef': cycledef,
- 'command': f'{self.HOMEgfs}/jobs/rocoto/atmos_products.sh',
- 'job_name': f'{self.pslot}_{task_name}_@H',
- 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
- 'maxtries': '&MAXTRIES;'
- }
+ products_dict = {'atmos': {'config': 'atmos_products',
+ 'history_path_tmpl': 'COM_ATMOS_MASTER_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.t@Hz.master.grb2#dep#'},
+ 'ocean': {'config': 'oceanice_products',
+ 'history_path_tmpl': 'COM_OCEAN_HISTORY_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.ocean.t@Hz.6hr_avg.#dep#.nc'},
+ 'ice': {'config': 'oceanice_products',
+ 'history_path_tmpl': 'COM_ICE_HISTORY_TMPL',
+ 'history_file_tmpl': f'{self.cdump}.ice.t@Hz.6hr_avg.#dep#.nc'}}
- metatask_dict = {'task_name': f'{self.cdump}atmprod',
- 'task_dict': task_dict,
- 'var_dict': var_dict
- }
-
- task = rocoto.create_task(metatask_dict)
-
- return task
-
- def ocnpost(self):
+ component_dict = products_dict[component]
+ config = component_dict['config']
+ history_path_tmpl = component_dict['history_path_tmpl']
+ history_file_tmpl = component_dict['history_file_tmpl']
varname1, varname2, varname3 = 'grp', 'dep', 'lst'
- varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs['ocnpost'])
+ varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs[config], component=component)
var_dict = {varname1: varval1, varname2: varval2, varname3: varval3}
postenvars = self.envars.copy()
- postenvar_dict = {'FHRLST': '#lst#',
- 'ROTDIR': self.rotdir}
+ postenvar_dict = {'FHRLST': '#lst#', 'COMPONENT': component}
for key, value in postenvar_dict.items():
postenvars.append(rocoto.create_envar(name=key, value=str(value)))
+ history_path = self._template_to_rocoto_cycstring(self._base[history_path_tmpl])
deps = []
- atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"])
- data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm.log#dep#.txt'
- dep_dict = {'type': 'data', 'data': data}
- deps.append(rocoto.add_dependency(dep_dict))
- dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'}
+ data = f'{history_path}/{history_file_tmpl}'
+ dep_dict = {'type': 'data', 'data': data, 'age': 120}
deps.append(rocoto.add_dependency(dep_dict))
- dependencies = rocoto.create_dependency(dep_condition='or', dep=deps)
+ dependencies = rocoto.create_dependency(dep=deps)
cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump
- resources = self.get_resource('ocnpost')
+ resources = self.get_resource(component_dict['config'])
- task_name = f'{self.cdump}ocnpost#{varname1}#'
+ task_name = f'{self.cdump}{component}_prod#{varname1}#'
task_dict = {'task_name': task_name,
'resources': resources,
'dependency': dependencies,
'envars': postenvars,
'cycledef': cycledef,
- 'command': f'{self.HOMEgfs}/jobs/rocoto/ocnpost.sh',
+ 'command': f"{self.HOMEgfs}/jobs/rocoto/{config}.sh",
'job_name': f'{self.pslot}_{task_name}_@H',
'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
'maxtries': '&MAXTRIES;'
}
- metatask_dict = {'task_name': f'{self.cdump}ocnpost',
+ metatask_dict = {'task_name': f'{self.cdump}{component}_prod',
'task_dict': task_dict,
'var_dict': var_dict
}
@@ -1357,7 +1349,7 @@ def _get_awipsgroups(cdump, config):
def awips_20km_1p0deg(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1398,7 +1390,7 @@ def awips_20km_1p0deg(self):
def awips_g2(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1439,7 +1431,7 @@ def awips_g2(self):
def gempak(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1462,7 +1454,7 @@ def gempak(self):
def gempakmeta(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1485,7 +1477,7 @@ def gempakmeta(self):
def gempakmetancdc(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1508,7 +1500,7 @@ def gempakmetancdc(self):
def gempakncdcupapgif(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1647,7 +1639,7 @@ def vminmon(self):
def tracker(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1670,7 +1662,7 @@ def tracker(self):
def genesis(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1693,7 +1685,7 @@ def genesis(self):
def genesis_fsu(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1716,7 +1708,7 @@ def genesis_fsu(self):
def fit2obs(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1781,7 +1773,7 @@ def metp(self):
def mos_stn_prep(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1804,7 +1796,7 @@ def mos_stn_prep(self):
def mos_grd_prep(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1827,7 +1819,7 @@ def mos_grd_prep(self):
def mos_ext_stn_prep(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1850,7 +1842,7 @@ def mos_ext_stn_prep(self):
def mos_ext_grd_prep(self):
deps = []
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -2168,7 +2160,7 @@ def arch(self):
dep_dict = {'type': 'task', 'name': f'{self.cdump}genesis_fsu'}
deps.append(rocoto.add_dependency(dep_dict))
# Post job dependencies
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'}
deps.append(rocoto.add_dependency(dep_dict))
if self.app_config.do_wave:
dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'}
@@ -2179,8 +2171,12 @@ def arch(self):
dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostbndpnt'}
deps.append(rocoto.add_dependency(dep_dict))
if self.app_config.do_ocean:
- if self.app_config.mode in ['forecast-only']: # TODO: fix ocnpost to run in cycled mode
- dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ocnpost'}
+ if self.cdump in ['gfs']:
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ocean_prod'}
+ deps.append(rocoto.add_dependency(dep_dict))
+ if self.app_config.do_ice:
+ if self.cdump in ['gfs']:
+ dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ice_prod'}
deps.append(rocoto.add_dependency(dep_dict))
# MOS job dependencies
if self.cdump in ['gfs'] and self.app_config.do_mos:
diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py
index 110dc286b5..540f6ebe47 100644
--- a/workflow/rocoto/tasks.py
+++ b/workflow/rocoto/tasks.py
@@ -22,8 +22,8 @@ class Tasks:
'aeroanlinit', 'aeroanlrun', 'aeroanlfinal',
'preplandobs', 'landanl',
'fcst',
- 'atmanlupp', 'atmanlprod', 'atmupp', 'atmprod', 'goesupp',
- 'ocnpost',
+ 'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp',
+                   'atmos_prod', 'ocean_prod', 'ice_prod',
'verfozn', 'verfrad', 'vminmon',
'metp',
'tracker', 'genesis', 'genesis_fsu',
@@ -128,7 +128,7 @@ def _get_forecast_hours(cdump, config) -> List[str]:
# Get a list of all forecast hours
fhrs = []
if cdump in ['gdas']:
- fhrs = range(fhmin, fhmax + fhout, fhout)
+ fhrs = list(range(fhmin, fhmax + fhout, fhout))
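+            # a concrete list so downstream code can prune hours (e.g. fhrs.remove(0) for averaged ocean/ice output)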
elif cdump in ['gfs', 'gefs']:
fhmax = config['FHMAX_GFS']
fhout = config['FHOUT_GFS']