From a7391d70716c5a543ec57cf74f2c5cf133c15330 Mon Sep 17 00:00:00 2001
From: "kate.friedman"
Date: Thu, 28 Jan 2021 19:04:52 +0000
Subject: [PATCH] Update free-forecast mode chgres jobs for chgres_cube

Update free-forecast mode to interface with the UFS_UTILS gdas_init
utility scripts.

Update the getic job to use the gdas_init get scripts to pull ICs off
HPSS for GFS versions 13 and later. Rename the fv3ic job to "init" and
update it to interface with the gdas_init run scripts, which run
chgres_cube to produce GFSv16 ICs. Update job dependencies to detect
whether the chgres jobs need to run and to hold the forecast jobs until
ICs are generated or already present.

Further updates are coming for this task. Tested on WCOSS-Dell; still
needs testing elsewhere. The getic job will be disabled on Orion.

Refs: #1, #178
---
 env/HERA.env                          |   6 +-
 env/JET.env                           |   6 +-
 env/ORION.env                         |   6 +-
 env/WCOSS_C.env                       |   6 +-
 env/WCOSS_DELL_P3.env                 |   6 +-
 jobs/rocoto/fv3ic.sh                  |  69 ---------
 jobs/rocoto/getic.sh                  | 212 +++++++-------------------
 jobs/rocoto/init.sh                   |  81 ++++++++++
 parm/config/config.getic              |  10 +-
 parm/config/config.init               |  19 +++
 parm/config/config.resources          |   7 +
 sorc/build_ufs_utils.sh               |   4 +-
 sorc/link_fv3gfs.sh                   |   2 +-
 ush/rocoto/setup_workflow_fcstonly.py |  59 ++-----
 14 files changed, 193 insertions(+), 300 deletions(-)
 delete mode 100755 jobs/rocoto/fv3ic.sh
 create mode 100755 jobs/rocoto/init.sh
 create mode 100755 parm/config/config.init

diff --git a/env/HERA.env b/env/HERA.env
index f71531c1ed..4290bdb259 100755
--- a/env/HERA.env
+++ b/env/HERA.env
@@ -214,11 +214,9 @@ elif [ $step = "epos" ]; then
     [[ $NTHREADS_EPOS -gt $nth_max ]] && export NTHREADS_EPOS=$nth_max
     export APRUN_EPOS="$launcher"
 
-elif [ $step = "fv3ic" ]; then
+elif [ $step = "init" ]; then
 
-    export NTHREADS_CHGRES=${nth_chgres:-$npe_node_max}
-    [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max
-    export APRUN_CHGRES="time"
+    export APRUN="$launcher"
 
 elif [ $step = "postsnd" ]; then
diff --git a/env/JET.env b/env/JET.env
index ef37e468df..5bbb903bd1 100755
--- a/env/JET.env
+++ b/env/JET.env
@@ -179,11 +179,9 @@ elif [ $step = "epos" ]; then
     [[ $NTHREADS_EPOS -gt $nth_max ]] && export NTHREADS_EPOS=$nth_max
     export APRUN_EPOS="$launcher ${npe_epos:-$PBS_NP}"
 
-elif [ $step = "fv3ic" ]; then
+elif [ $step = "init" ]; then
 
-    export NTHREADS_CHGRES=${nth_chgres:-$npe_node_max}
-    [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max
-    export APRUN_CHGRES="time"
+    export APRUN="$launcher"
 
 elif [ $step = "postsnd" ]; then
diff --git a/env/ORION.env b/env/ORION.env
index de989331ff..69721c21ed 100755
--- a/env/ORION.env
+++ b/env/ORION.env
@@ -202,11 +202,9 @@ elif [ $step = "epos" ]; then
     [[ $NTHREADS_EPOS -gt $nth_max ]] && export NTHREADS_EPOS=$nth_max
     export APRUN_EPOS="$launcher"
 
-elif [ $step = "fv3ic" ]; then
+elif [ $step = "init" ]; then
 
-    export NTHREADS_CHGRES=${nth_chgres:-$npe_node_max}
-    [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max
-    export APRUN_CHGRES="time"
+    export APRUN="$launcher"
 
 elif [ $step = "postsnd" ]; then
diff --git a/env/WCOSS_C.env b/env/WCOSS_C.env
index 4885a966a0..bd88a44f31 100755
--- a/env/WCOSS_C.env
+++ b/env/WCOSS_C.env
@@ -198,11 +198,9 @@ elif [ $step = "epos" ]; then
     [[ $NTHREADS_EPOS -gt $nth_max ]] && export NTHREADS_EPOS=$nth_max
     export APRUN_EPOS="$launcher -j 1 -n $npe_epos -N $npe_node_epos -d $NTHREADS_EPOS -cc depth"
 
-elif [ $step = "fv3ic" ]; then
+elif [ $step = "init" ]; then
 
-    export NTHREADS_CHGRES=${nth_chgres:-$npe_node_max}
-    [[ $NTHREADS_CHGRES -gt
$npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max - export APRUN_CHGRES="$launcher -j 1 -n 1 -N 1 -d $NTHREADS_CHGRES -cc depth" + export APRUN="$launcher" elif [ $step = "vrfy" ]; then diff --git a/env/WCOSS_DELL_P3.env b/env/WCOSS_DELL_P3.env index a028826b93..e2a6989f1d 100755 --- a/env/WCOSS_DELL_P3.env +++ b/env/WCOSS_DELL_P3.env @@ -204,11 +204,9 @@ elif [ $step = "epos" ]; then [[ $NTHREADS_EPOS -gt $nth_max ]] && export NTHREADS_EPOS=$nth_max export APRUN_EPOS="$launcher ${npe_epos:-$PBS_NP}" -elif [ $step = "fv3ic" ]; then +elif [ $step = "init" ]; then - export NTHREADS_CHGRES=${nth_chgres:-$npe_node_max} - [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max - export APRUN_CHGRES="time" + export APRUN="mpirun" elif [ $step = "postsnd" ]; then diff --git a/jobs/rocoto/fv3ic.sh b/jobs/rocoto/fv3ic.sh deleted file mode 100755 index b5c2e40b48..0000000000 --- a/jobs/rocoto/fv3ic.sh +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/ksh -x - -############################################################### -## Abstract: -## Create FV3 initial conditions from GFS intitial conditions -## RUN_ENVIR : runtime environment (emc | nco) -## HOMEgfs : /full/path/to/workflow -## EXPDIR : /full/path/to/config/files -## CDATE : current date (YYYYMMDDHH) -## CDUMP : cycle name (gdas / gfs) -## PDY : current date (YYYYMMDD) -## cyc : current cycle (HH) -############################################################### - -############################################################### -# Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -# Source relevant configs -configs="base fv3ic" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done - -############################################################### -# Source machine runtime environment -. $BASE_ENV/${machine}.env fv3ic -status=$? -[[ $status -ne 0 ]] && exit $status - -# Set component -export COMPONENT=${COMPONENT:-atmos} - -# Temporary runtime directory -export DATA="$RUNDIR/$CDATE/$CDUMP/fv3ic$$" -[[ -d $DATA ]] && rm -rf $DATA - -# Input GFS initial condition directory -export INIDIR="$ICSDIR/$CDATE/$CDUMP/$CDUMP.$PDY/$cyc" - -# Output FV3 initial condition directory -export OUTDIR="$ICSDIR/$CDATE/$CDUMP/$CASE/INPUT" - -export OMP_NUM_THREADS_CH=$NTHREADS_CHGRES -export APRUNC=$APRUN_CHGRES - -# Call global_chgres_driver.sh -$HOMEgfs/ush/global_chgres_driver.sh -status=$? -if [ $status -ne 0 ]; then - echo "global_chgres_driver.sh returned with a non-zero exit code, ABORT!" - exit $status -fi - -# Stage the FV3 initial conditions to ROTDIR -COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -[[ ! -d $COMOUT ]] && mkdir -p $COMOUT -cd $COMOUT || exit 99 -rm -rf INPUT -$NLN $OUTDIR . - -############################################################### -# Exit cleanly -exit 0 diff --git a/jobs/rocoto/getic.sh b/jobs/rocoto/getic.sh index 120e3b9454..fd07a9ead4 100755 --- a/jobs/rocoto/getic.sh +++ b/jobs/rocoto/getic.sh @@ -36,166 +36,68 @@ status=$? 
############################################################### # Set script and dependency variables -yyyy=$(echo $CDATE | cut -c1-4) +yy=$(echo $CDATE | cut -c1-4) mm=$(echo $CDATE | cut -c5-6) dd=$(echo $CDATE | cut -c7-8) -cyc=${cyc:-$(echo $CDATE | cut -c9-10)} - -export COMPONENT=${COMPONENT:-atmos} - -############################################################### - -target_dir=$ICSDIR/$CDATE/$CDUMP -mkdir -p $target_dir -cd $target_dir - -# Initialize return code to 0 -rc=1 - -if [ $ics_from = "opsgfs" ]; then - - # Location of production tarballs on HPSS - hpssdir="/NCEPPROD/hpssprod/runhistory/rh$yyyy/$yyyy$mm/$PDY" - - # Handle nemsio and pre-nemsio GFS filenames - if [ $CDATE -le "2019061118" ]; then #GFSv14 - # Add CDUMP.PDY/CYC to target_dir - target_dir=$ICSDIR/$CDATE/$CDUMP/${CDUMP}.$yyyy$mm$dd/$cyc - mkdir -p $target_dir - cd $target_dir - - nfanal=4 - fanal[1]="./${CDUMP}.t${cyc}z.atmanl.nemsio" - fanal[2]="./${CDUMP}.t${cyc}z.sfcanl.nemsio" - fanal[3]="./${CDUMP}.t${cyc}z.nstanl.nemsio" - fanal[4]="./${CDUMP}.t${cyc}z.pgrbanl" - flanal="${fanal[1]} ${fanal[2]} ${fanal[3]} ${fanal[4]}" - tarpref="gpfs_hps_nco_ops_com" - if [ $CDUMP = "gdas" ]; then - tarball="$hpssdir/${tarpref}_gfs_prod_${CDUMP}.${CDATE}.tar" - elif [ $CDUMP = "gfs" ]; then - tarball="$hpssdir/${tarpref}_gfs_prod_${CDUMP}.${CDATE}.anl.tar" - fi - else #GFSv15 - nfanal=2 - fanal[1]="./${CDUMP}.$yyyy$mm$dd/$cyc/${CDUMP}.t${cyc}z.atmanl.nemsio" - fanal[2]="./${CDUMP}.$yyyy$mm$dd/$cyc/${CDUMP}.t${cyc}z.sfcanl.nemsio" - flanal="${fanal[1]} ${fanal[2]}" - if [ $CDATE -ge "2020022600" ]; then - tarpref="com" - else - tarpref="gpfs_dell1_nco_ops_com" - fi - if [ $CDUMP = "gdas" ]; then - tarball="$hpssdir/${tarpref}_gfs_prod_${CDUMP}.${yyyy}${mm}${dd}_${cyc}.${CDUMP}_nemsio.tar" - elif [ $CDUMP = "gfs" ]; then - tarball="$hpssdir/${tarpref}_gfs_prod_${CDUMP}.${yyyy}${mm}${dd}_${cyc}.${CDUMP}_nemsioa.tar" - fi - fi - - # First check the COMROOT for files, if present copy over - if [ $machine = "WCOSS_C" ]; then - - # Need COMROOT - module load prod_envir/1.1.0 >> /dev/null 2>&1 - - comdir="$COMROOT/$CDUMP/prod/$CDUMP.$PDY" - rc=0 - for i in `seq 1 $nfanal`; do - if [ -f $comdir/${fanal[i]} ]; then - $NCP $comdir/${fanal[i]} ${fanal[i]} - else - rb=1 ; ((rc+=rb)) - fi - done - - fi - - # Get initial conditions from HPSS - if [ $rc -ne 0 ]; then - - # check if the tarball exists - hsi ls -l $tarball - rc=$? - if [ $rc -ne 0 ]; then - echo "$tarball does not exist and should, ABORT!" - exit $rc - fi - # get the tarball - htar -xvf $tarball $flanal - rc=$? - if [ $rc -ne 0 ]; then - echo "untarring $tarball failed, ABORT!" - exit $rc - fi - - # Move the files to legacy EMC filenames - if [ $CDATE -le "2019061118" ]; then #GFSv14 - for i in `seq 1 $nfanal`; do - $NMV ${fanal[i]} ${flanal[i]} - done - fi - - fi - - # If found, exit out - if [ $rc -ne 0 ]; then - echo "Unable to obtain operational GFS initial conditions, ABORT!" 
-    exit 1
-  fi
-
-elif [ $ics_from = "pargfs" ]; then
-
-  # Add CDUMP.PDY/CYC to target_dir
-  target_dir=$ICSDIR/$CDATE/$CDUMP/${CDUMP}.$yyyy$mm$dd/$cyc
-  mkdir -p $target_dir
-  cd $target_dir
-
-  # Filenames in parallel
-  nfanal=4
-  fanal[1]="gfnanl.${CDUMP}.$CDATE"
-  fanal[2]="sfnanl.${CDUMP}.$CDATE"
-  fanal[3]="nsnanl.${CDUMP}.$CDATE"
-  fanal[4]="pgbanl.${CDUMP}.$CDATE"
-  flanal="${fanal[1]} ${fanal[2]} ${fanal[3]} ${fanal[4]}"
-
-  # Get initial conditions from HPSS from retrospective parallel
-  tarball="$HPSS_PAR_PATH/${CDATE}${CDUMP}.tar"
-
-  # check if the tarball exists
-  hsi ls -l $tarball
-  rc=$?
-  if [ $rc -ne 0 ]; then
-    echo "$tarball does not exist and should, ABORT!"
-    exit $rc
-  fi
-  # get the tarball
-  htar -xvf $tarball $flanal
-  rc=$?
-  if [ $rc -ne 0 ]; then
-    echo "untarring $tarball failed, ABORT!"
-    exit $rc
-  fi
-
-  # If found, exit out
-  if [ $rc -ne 0 ]; then
-    echo "Unable to obtain parallel GFS initial conditions, ABORT!"
-    exit 1
-  fi
-
-else
+hh=${cyc:-$(echo $CDATE | cut -c9-10)}
+
+EXTRACT_DIR=${PTMP}/gdas.init_${CDATE}/input
+OUTDIR=${PTMP}/gdas.init_${CDATE}/output
+PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory/rh${yy}/${yy}${mm}/${yy}${mm}${dd}
+
+COMPONENT="atmos"
+
+gfs_ver=v16
+GETICSH=${GDASINIT_DIR}/get_v16.data.sh
+
+# Scripts do not support GFS data prior to 2012/05/21/00z (no ENKF data before then)
+if [ $yy$mm$dd$hh -lt 2012052100 ]; then
+  set +x
+  echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA
+  exit 2
+elif [ $yy$mm$dd$hh -lt 2016051000 ]; then
+  gfs_ver=v12
+  GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh
+elif [ $yy$mm$dd$hh -lt 2017072000 ]; then
+  gfs_ver=v13
+  GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh
+elif [ $yy$mm$dd$hh -lt 2019061200 ]; then
+  gfs_ver=v14
+  GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh
+elif [ $yy$mm$dd$hh -lt 2021020300 ]; then
+  gfs_ver=v15
+  GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh
+fi
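+# Example of the selection above: CDATE=2018011512 falls in the range
+# [2017072000, 2019061200), so gfs_ver=v14 is set and the data are pulled
+# with ${GDASINIT_DIR}/get_v14.data.sh.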
 
-  echo "ics_from = $ics_from is not supported, ABORT!"
-  exit 1
+export EXTRACT_DIR yy mm dd hh UFS_DIR OUTDIR CRES_HIRES CRES_ENKF
+export LEVS gfs_ver
 
-fi
-###############################################################
+# Run get data script
+if [ ! -d $EXTRACT_DIR ]; then mkdir -p $EXTRACT_DIR ; fi
+sh ${GETICSH} ${CDUMP}
+status=$?
+[[ $status -ne 0 ]] && exit $status
 
-# Copy pgbanl file to COMROT for verification - GFSv14 only
-if [ $CDATE -le "2019061118" ]; then #GFSv14
-  COMROT=$ROTDIR/${CDUMP}.$PDY/$cyc/$COMPONENT
-  [[ ! -d $COMROT ]] && mkdir -p $COMROT
-  $NCP ${fanal[4]} $COMROT/${CDUMP}.t${cyc}z.pgrbanl
+# Copy pgbanl file to ROTDIR for verification/archival - v14+
+cd $EXTRACT_DIR
+OUTDIR2=${ROTDIR}/gfs.${yy}${mm}${dd}/${hh}/${COMPONENT}
+if [ ! -d ${OUTDIR2} ]; then mkdir -p ${OUTDIR2} ; fi
+if [ $gfs_ver = v14 ]; then
+  for grid in 0p25 0p50 1p00
+  do
+    tarball=gpfs_hps_nco_ops_com_gfs_prod_gfs.${yy}${mm}${dd}_${hh}.pgrb2_${grid}.tar
+    file=gfs.t${hh}z.pgrb2.${grid}.anl
+    htar -xvf ${PRODHPSSDIR}/${tarball} ./gfs.${yy}${mm}${dd}/${hh}/${file}
+    mv ${EXTRACT_DIR}/gfs.${yy}${mm}${dd}/${hh}/${file} ${OUTDIR2}/${file}
+  done
+elif [ $gfs_ver = v15 ]; then
+  tarball=com_gfs_prod_gfs.${yy}${mm}${dd}_${hh}.gfs_pgrb2.tar
+  for grid in 0p25 0p50 1p00
+  do
+    file=gfs.t${hh}z.pgrb2.${grid}.anl
+    htar -xvf ${PRODHPSSDIR}/${tarball} ./gfs.${yy}${mm}${dd}/${hh}/${file}
+    mv ${EXTRACT_DIR}/gfs.${yy}${mm}${dd}/${hh}/${file} ${OUTDIR2}/${file}
+  done
 fi
 
 ###############################################################
diff --git a/jobs/rocoto/init.sh b/jobs/rocoto/init.sh
new file mode 100755
index 0000000000..c36addb2ed
--- /dev/null
+++ b/jobs/rocoto/init.sh
@@ -0,0 +1,81 @@
+#!/bin/ksh -x
+
+###############################################################
+## Abstract:
+## Create FV3 initial conditions from GFS initial conditions via chgres_cube
+## RUN_ENVIR : runtime environment (emc | nco)
+## HOMEgfs : /full/path/to/workflow
+## EXPDIR : /full/path/to/config/files
+## CDATE : current date (YYYYMMDDHH)
+## CDUMP : cycle name (gdas / gfs)
+## PDY : current date (YYYYMMDD)
+## cyc : current cycle (HH)
+###############################################################
+
+###############################################################
+# Source FV3GFS workflow modules
+. $HOMEgfs/ush/load_fv3gfs_modules.sh
+status=$?
+[[ $status -ne 0 ]] && exit $status
+
+###############################################################
+# Source relevant configs
+configs="base init"
+for config in $configs; do
+  . $EXPDIR/config.${config}
+  status=$?
+  [[ $status -ne 0 ]] && exit $status
+done
+
+###############################################################
+# Source machine runtime environment
+. $BASE_ENV/${machine}.env init
+status=$?
+[[ $status -ne 0 ]] && exit $status
+
+###############################################################
+# Set script and dependency variables
+
+yy=$(echo $CDATE | cut -c1-4)
+mm=$(echo $CDATE | cut -c5-6)
+dd=$(echo $CDATE | cut -c7-8)
+hh=${cyc:-$(echo $CDATE | cut -c9-10)}
+
+EXTRACT_DIR=${PTMP}/gdas.init_${CDATE}/input
+WORKDIR=${PTMP}/gdas.init_${CDATE}/output
+OUTDIR=${ROTDIR}
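+
+# OUTDIR is the experiment ROTDIR, so the chgres_cube output should land
+# where the rocoto data dependencies for the forecast job look for it,
+# e.g. $ROTDIR/$CDUMP.YYYYMMDD/HH/INPUT/sfc_data.tile6.nc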
+
+gfs_ver=v16
+RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh
+
+# Scripts do not support GFS data prior to 2012/05/21/00z (no ENKF data before then)
+if [ $yy$mm$dd$hh -lt 2012052100 ]; then
+  set +x
+  echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA
+  exit 2
+elif [ $yy$mm$dd$hh -lt 2016051000 ]; then
+  gfs_ver=v12
+  RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh
+elif [ $yy$mm$dd$hh -lt 2017072000 ]; then
+  gfs_ver=v13
+  RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh
+elif [ $yy$mm$dd$hh -lt 2019061200 ]; then
+  gfs_ver=v14
+  RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh
+elif [ $yy$mm$dd$hh -lt 2021020300 ]; then
+  gfs_ver=v15
+  RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh
+fi
+
+export EXTRACT_DIR yy mm dd hh UFS_DIR OUTDIR CRES_HIRES CRES_ENKF
+export LEVS gfs_ver
+
+# Run chgres_cube
+if [ ! -d $OUTDIR ]; then mkdir -p $OUTDIR ; fi
+sh ${RUNICSH} ${CDUMP}
+status=$?
+[[ $status -ne 0 ]] && exit $status
+
+###############################################################
+# Exit cleanly
+exit 0
diff --git a/parm/config/config.getic b/parm/config/config.getic
index a754454264..64f7159cc9 100755
--- a/parm/config/config.getic
+++ b/parm/config/config.getic
@@ -8,13 +8,9 @@ echo "BEGIN: config.getic"
 # Get task specific resources
 . $EXPDIR/config.resources getic
 
-# We should just be supporting the OPSGFS only
-export ics_from="opsgfs"        # initial conditions from opsgfs or pargfs
+UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd
+GDASINIT_DIR=${UFS_DIR}/util/gdas_init
 
-# Provide a parallel experiment name and path to HPSS archive
-if [ $ics_from = "pargfs" ]; then
-  export parexp="prnemsrn"
-  export HPSS_PAR_PATH="/5year/NCEPDEV/emc-global/emc.glopara/WCOSS_C/$parexp"
-fi
+EXTRACT_DATA="yes"
 
 echo "END: config.getic"
diff --git a/parm/config/config.init b/parm/config/config.init
new file mode 100755
index 0000000000..f9efbde888
--- /dev/null
+++ b/parm/config/config.init
@@ -0,0 +1,19 @@
+#!/bin/ksh -x
+
+########## config.init ##########
+# Prepare initial conditions
+
+echo "BEGIN: config.init"
+
+# Get task specific resources
+. $EXPDIR/config.resources init
+
+UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd
+GDASINIT_DIR=${UFS_DIR}/util/gdas_init
+
+RUN_CHGRES="yes"
+
+CRES_HIRES=$CASE
+CRES_ENKF=$CASE_ENKF
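+
+# For example, an experiment with CASE=C384 passes CRES_HIRES=C384 to the
+# gdas_init run scripts as the chgres_cube target resolution; CRES_ENKF
+# presumably only matters in cycled mode but is exported for the shared scripts.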
+
+echo "END: config.init"
diff --git a/parm/config/config.resources b/parm/config/config.resources
index 3d996d96b6..d62424aa32 100755
--- a/parm/config/config.resources
+++ b/parm/config/config.resources
@@ -262,6 +262,13 @@ elif [ $step = "echgres" ]; then
     export nth_echgres=$npe_node_max
     export npe_node_echgres=1
 
+elif [ $step = "init" ]; then
+
+    export wtime_init="00:15:00"
+    export npe_init=24
+    export nth_init=1
+    export npe_node_init=6
+
 elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then
 
     eval "export wtime_$step='06:00:00'"
diff --git a/sorc/build_ufs_utils.sh b/sorc/build_ufs_utils.sh
index 292145d764..0a22135a15 100755
--- a/sorc/build_ufs_utils.sh
+++ b/sorc/build_ufs_utils.sh
@@ -7,9 +7,9 @@ cwd=`pwd`
 if [ $target = wcoss_dell_p3 ]; then target=dell; fi
 if [ $target = wcoss_cray ]; then target=cray; fi
 
-cd ufs_utils.fd/sorc
+cd ufs_utils.fd
 
-./build_all_ufs_utils.sh
+./build_all.sh
 
 exit
diff --git a/sorc/link_fv3gfs.sh b/sorc/link_fv3gfs.sh
index e5f0f08ad4..403c7364ca 100755
--- a/sorc/link_fv3gfs.sh
+++ b/sorc/link_fv3gfs.sh
@@ -49,7 +49,7 @@ for dir in fix_am fix_fv3_gmted2010 fix_gldas fix_orog fix_verif fix_wave_gfs ;
 done
 
 if [ -d ${pwd}/ufs_utils.fd ]; then
-  cd ${pwd}/ufs_utils.fd/sorc
+  cd ${pwd}/ufs_utils.fd/fix
   ./link_fixdirs.sh $RUN_ENVIR $machine
 fi
 
diff --git a/ush/rocoto/setup_workflow_fcstonly.py b/ush/rocoto/setup_workflow_fcstonly.py
index c7021f904e..795602204e 100755
--- a/ush/rocoto/setup_workflow_fcstonly.py
+++ b/ush/rocoto/setup_workflow_fcstonly.py
@@ -27,8 +27,7 @@ import rocoto
 import workflow_utils as wfu
 
-#taskplan = ['getic', 'fv3ic', 'waveinit', 'waveprep', 'fcst', 'post', 'wavepostsbs', 'wavegempak', 'waveawipsbulls', 'waveawipsgridded', 'wavepost', 'wavestat', 'wafs', 'wafsgrib2', 'wafsblending', 'wafsgcip', 'wafsgrib20p25', 'wafsblending0p25', 'vrfy', 'metp', 'arch']
-taskplan = ['getic', 'fv3ic', 'waveinit', 'waveprep', 'fcst', 'post', 'wavepostsbs', 'wavepostbndpnt', 'wavepostpnt', 'wavegempak', 'waveawipsbulls', 'waveawipsgridded', 'wafs', 'wafsgrib2', 'wafsblending', 'wafsgcip', 'wafsgrib20p25', 'wafsblending0p25', 'vrfy', 'metp', 'arch']
+taskplan = ['getic', 'init', 'waveinit', 'waveprep', 'fcst', 'post', 'wavepostsbs', 'wavepostbndpnt', 'wavepostpnt', 'wavegempak', 'waveawipsbulls', 'waveawipsgridded', 'wafs', 'wafsgrib2', 'wafsblending', 'wafsgcip', 'wafsgrib20p25', 'wafsblending0p25', 'vrfy', 'metp', 'arch']
 
 def main():
     parser = ArgumentParser(description='Setup XML workflow and CRONTAB for a forecast only experiment.', formatter_class=ArgumentDefaultsHelpFormatter)
@@ -244,73 +243,41 @@ def get_workflow(dict_configs, cdump='gdas'):
 
     tasks = []
 
-    # getics
+    # getic
     deps = []
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CDUMP;.@Y@m@d/@H/siganl.&CDUMP;.@Y@m@d@H'
+    data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/INPUT/sfc_data.tile6.nc'
     dep_dict = {'type':'data', 'data':data}
     deps.append(rocoto.add_dependency(dep_dict))
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CDUMP;.@Y@m@d/@H/&CDUMP;.t@Hz.sanl'
-    dep_dict = {'type':'data', 'data':data}
-    deps.append(rocoto.add_dependency(dep_dict))
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CDUMP;.@Y@m@d/@H/gfnanl.&CDUMP;.@Y@m@d@H'
-    dep_dict = {'type':'data', 'data':data}
-    deps.append(rocoto.add_dependency(dep_dict))
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CDUMP;.@Y@m@d/@H/&CDUMP;.t@Hz.atmanl.nemsio'
-    dep_dict = {'type':'data', 'data':data}
-    deps.append(rocoto.add_dependency(dep_dict))
-    dependencies = rocoto.create_dependency(dep_condition='or', dep=deps)
-
-    deps = []
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CASE;/INPUT/gfs_data.tile6.nc'
-    dep_dict = {'type':'data', 'data':data}
-    deps.append(rocoto.add_dependency(dep_dict))
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CASE;/INPUT/sfc_data.tile6.nc'
+    data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc'
     dep_dict = {'type':'data', 'data':data}
     deps.append(rocoto.add_dependency(dep_dict))
-    deps = rocoto.create_dependency(dep_condition='and', dep=deps)
-    dependencies2 = rocoto.create_dependency(dep_condition='not', dep=deps)
-
-    deps = []
-    deps.append(dependencies)
-    deps.append(dependencies2)
-    dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
+    dependencies = rocoto.create_dependency(dep_condition='nor', dep=deps)
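+    # 'nor' inverts the data checks: getic is triggered only when neither the
+    # chgres_cube cold-start file nor a model restart already exists under
+    # ROTDIR for this cycle, so ICs already staged are not re-pulled from HPSS.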
 
     task = wfu.create_wf_task('getic', cdump=cdump, envar=envars, dependency=dependencies)
     tasks.append(task)
     tasks.append('\n')
 
-    # chgres fv3ic
+    # init - chgres_cube
     deps = []
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CDUMP;.@Y@m@d/@H/siganl.&CDUMP;.@Y@m@d@H'
+    data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/INPUT/sfc_data.tile6.nc'
    dep_dict = {'type':'data', 'data':data}
     deps.append(rocoto.add_dependency(dep_dict))
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CDUMP;.@Y@m@d/@H/&CDUMP;.t@Hz.sanl'
-    dep_dict = {'type':'data', 'data':data}
-    deps.append(rocoto.add_dependency(dep_dict))
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CDUMP;.@Y@m@d/@H/gfnanl.&CDUMP;.@Y@m@d@H'
-    dep_dict = {'type':'data', 'data':data}
-    deps.append(rocoto.add_dependency(dep_dict))
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CDUMP;.@Y@m@d/@H/&CDUMP;.t@Hz.atmanl.nemsio'
+    data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc'
     dep_dict = {'type':'data', 'data':data}
     deps.append(rocoto.add_dependency(dep_dict))
-    dependencies = rocoto.create_dependency(dep_condition='or', dep=deps)
+    dependencies = rocoto.create_dependency(dep_condition='nor', dep=deps)
 
     deps = []
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CASE;/INPUT/gfs_data.tile6.nc'
-    dep_dict = {'type':'data', 'data':data}
-    deps.append(rocoto.add_dependency(dep_dict))
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CASE;/INPUT/sfc_data.tile6.nc'
-    dep_dict = {'type':'data', 'data':data}
+    dep_dict = {'type': 'task', 'name': '%sgetic' % cdump}
     deps.append(rocoto.add_dependency(dep_dict))
-    deps = rocoto.create_dependency(dep_condition='and', dep=deps)
-    dependencies2 = rocoto.create_dependency(dep_condition='not', dep=deps)
+    dependencies2 = rocoto.create_dependency(dep=deps)
 
     deps = []
     deps.append(dependencies)
     deps.append(dependencies2)
     dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
-    task = wfu.create_wf_task('fv3ic', cdump=cdump, envar=envars, dependency=dependencies)
+    task = wfu.create_wf_task('init', cdump=cdump, envar=envars, dependency=dependencies)
     tasks.append(task)
     tasks.append('\n')
@@ -332,7 +299,7 @@ def get_workflow(dict_configs, cdump='gdas'):
 
     # fcst
     deps = []
-    data = '&ICSDIR;/@Y@m@d@H/&CDUMP;/&CASE;/INPUT/sfc_data.tile6.nc'
+    data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/INPUT/sfc_data.tile6.nc'
     dep_dict = {'type':'data', 'data':data}
     deps.append(rocoto.add_dependency(dep_dict))
     data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc'