From fcbfcea025a24ee1716771fb3a020862e9ee38b4 Mon Sep 17 00:00:00 2001 From: George Gayno Date: Mon, 11 Jul 2022 15:19:53 -0500 Subject: [PATCH 01/18] Remove references to wcoss1 from ./fix/link_fixdirs.sh Fixes #665. --- fix/link_fixdirs.sh | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/fix/link_fixdirs.sh b/fix/link_fixdirs.sh index bc5ea6f41..2d3c3837a 100755 --- a/fix/link_fixdirs.sh +++ b/fix/link_fixdirs.sh @@ -9,20 +9,20 @@ machine=${2} if [ $# -lt 2 ]; then set +x echo '***ERROR*** must specify two arguements: (1) RUN_ENVIR, (2) machine' - echo ' Syntax: link_fv3gfs.sh ( nco | emc ) ( cray | dell | hera | jet | orion | s4 )' + echo ' Syntax: link_fv3gfs.sh ( nco | emc ) ( hera | jet | orion | s4 )' exit 1 fi if [ $RUN_ENVIR != emc -a $RUN_ENVIR != nco ]; then set +x echo '***ERROR*** unsupported run environment' - echo 'Syntax: link_fv3gfs.sh ( nco | emc ) ( cray | dell | hera | jet | orion | s4 )' + echo 'Syntax: link_fv3gfs.sh ( nco | emc ) ( hera | jet | orion | s4 )' exit 1 fi -if [ $machine != cray -a $machine != hera -a $machine != dell -a $machine != jet -a $machine != orion -a $machine != s4 ]; then +if [ $machine != hera -a $machine != jet -a $machine != orion -a $machine != s4 ]; then set +x echo '***ERROR*** unsupported machine' - echo 'Syntax: link_fv3gfs.sh ( nco | emc ) ( cray | dell | hera | jet | orion | s4 )' + echo 'Syntax: link_fv3gfs.sh ( nco | emc ) ( hera | jet | orion | s4 )' exit 1 fi @@ -35,11 +35,7 @@ pwd=$(pwd -P) #------------------------------ #--model fix fields #------------------------------ -if [ $machine == "cray" ]; then - FIX_DIR="/gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix" -elif [ $machine = "dell" ]; then - FIX_DIR="/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix" -elif [ $machine = "hera" ]; then +if [ $machine = "hera" ]; then FIX_DIR="/scratch1/NCEPDEV/global/glopara/fix" elif [ $machine = "jet" ]; then FIX_DIR="/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix" From 73ffbda89db583d9c1c0d9651441d01f8f5d091f Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 07:36:26 -0500 Subject: [PATCH 02/18] Remove references to wcoss1 from ./sorc/machine-setup.sh Fixes #665. --- sorc/machine-setup.sh | 42 ------------------------------------------ 1 file changed, 42 deletions(-) diff --git a/sorc/machine-setup.sh b/sorc/machine-setup.sh index ad82266e3..74e86e4d2 100644 --- a/sorc/machine-setup.sh +++ b/sorc/machine-setup.sh @@ -35,48 +35,6 @@ elif [[ -d /scratch1 ]] ; then fi target=hera module purge -elif [[ -d /gpfs/hps && -e /etc/SuSE-release ]] ; then - # We are on NOAA Luna or Surge - if ( ! eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 - source /opt/modules/default/init/$__ms_shell - fi - target=wcoss_cray - - # Silence the "module purge" to avoid the expected error messages - # related to modules that load modules. 
- module purge > /dev/null 2>&1 - module use /usrx/local/prod/modulefiles - module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles - module use /gpfs/hps/nco/ops/nwprod/modulefiles - module use /opt/cray/alt-modulefiles - module use /opt/cray/craype/default/alt-modulefiles - module use /opt/cray/ari/modulefiles - module use /opt/modulefiles - module purge > /dev/null 2>&1 - - # Workaround until module issues are fixed: - #unset _LMFILES_ - #unset LOADEDMODULES - echo y 2> /dev/null | module clear > /dev/null 2>&1 - - module use /usrx/local/prod/modulefiles - module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles - module use /gpfs/hps/nco/ops/nwprod/modulefiles - module use /opt/cray/alt-modulefiles - module use /opt/cray/craype/default/alt-modulefiles - module use /opt/cray/ari/modulefiles - module use /opt/modulefiles - module load modules - -elif [[ -L /usrx && "$( readlink /usrx 2> /dev/null )" =~ dell ]] ; then - # We are on NOAA Venus or Mars - if ( ! eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 - source /usrx/local/prod/lmod/lmod/init/$__ms_shell - fi - target=wcoss_dell_p3 - module purge elif [[ -d /glade ]] ; then # We are on NCAR Cheyenne if ( ! eval module help > /dev/null 2>&1 ) ; then From 59de683aadb171e36b7b0084e3e79fb5d311b470 Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 07:47:47 -0500 Subject: [PATCH 03/18] Remove references to wcoss1 from the vcoord_gen utility. Fixes #665. --- util/vcoord_gen/run.cray.sh | 52 ------------------------------------- util/vcoord_gen/run.sh | 3 +-- 2 files changed, 1 insertion(+), 54 deletions(-) delete mode 100755 util/vcoord_gen/run.cray.sh diff --git a/util/vcoord_gen/run.cray.sh b/util/vcoord_gen/run.cray.sh deleted file mode 100755 index 968625a19..000000000 --- a/util/vcoord_gen/run.cray.sh +++ /dev/null @@ -1,52 +0,0 @@ -#!/bin/bash - -#BSUB -W 0:01 -#BSUB -o log -#BSUB -e log -#BSUB -J vcoord -#BSUB -q debug -#BSUB -R "rusage[mem=100]" -#BSUB -P GFS-DEV - -#------------------------------------------------------------------------------- -# -# Generate a hybrid coordinate interface profile on WCOSS-Cray. -# -# Build the repository using the ./build_all.sh script before running. -# -# Output 'ak' and 'bk' values are placed in $outfile. 
-# -#------------------------------------------------------------------------------- - -set -x - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module list - -outfile="./global_hyblev.txt" - -levs=128 # integer number of levels -lupp=88 # integer number of levels below pupp -pbot=100000.0 # real nominal surface pressure (Pa) -psig=99500.0 # real nominal pressure where coordinate changes - # from pure sigma (Pa) -ppre=7000.0 # real nominal pressure where coordinate changes - # to pure pressure (Pa) -pupp=7000.0 # real nominal pressure where coordinate changes - # to upper atmospheric profile (Pa) -ptop=0.0 # real pressure at top (Pa) -dpbot=240.0 # real coordinate thickness at bottom (Pa) -dpsig=1200.0 # real thickness of zone within which coordinate changes - # to pure sigma (Pa) -dppre=18000.0 # real thickness of zone within which coordinate changes - # to pure pressure (Pa) -dpupp=550.0 # real coordinate thickness at pupp (Pa) -dptop=1.0 # real coordinate thickness at top (Pa) - -rm -f $outfile - -echo $levs $lupp $pbot $psig $ppre $pupp $ptop $dpbot $dpsig $dppre $dpupp $dptop | $PWD/../../exec/vcoord_gen > $outfile - -exit diff --git a/util/vcoord_gen/run.sh b/util/vcoord_gen/run.sh index b0d5c3aff..931de0a0a 100755 --- a/util/vcoord_gen/run.sh +++ b/util/vcoord_gen/run.sh @@ -2,8 +2,7 @@ #------------------------------------------------------------------------------- # -# Generate a hybrid coordinate interface profile. On WCOSS-Cray, use -# 'run.cray.sh'. +# Generate a hybrid coordinate interface profile. # # Build the repository using the ./build_all.sh script before running. # From ed79a012c4a0eddd9e9e3141df57e8bdf640244e Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 07:53:24 -0500 Subject: [PATCH 04/18] Remove Dell script from sfc_climo_gen utility. Fixes #665. --- util/sfc_climo_gen/run.dell.sh | 85 ---------------------------------- 1 file changed, 85 deletions(-) delete mode 100755 util/sfc_climo_gen/run.dell.sh diff --git a/util/sfc_climo_gen/run.dell.sh b/util/sfc_climo_gen/run.dell.sh deleted file mode 100755 index b262bf510..000000000 --- a/util/sfc_climo_gen/run.dell.sh +++ /dev/null @@ -1,85 +0,0 @@ -#!/bin/bash - -#------------------------------------------------------------ -# Run the sfc_climo_gen program stand-alone on Dell using -# pre-exiting 'grid' and 'orography files. -#------------------------------------------------------------ - -#BSUB -oo log -#BSUB -eo log -#BSUB -q debug -#BSUB -P GFS-DEV -#BSUB -J grid_fv3 -#BSUB -W 0:10 -#BSUB -x # run not shared -#BSUB -n 24 # total tasks -#BSUB -R span[ptile=24] # tasks per node -#BSUB -R affinity[core(1):distribute=balance] - -set -x - -export BASE_DIR=$LS_SUBCWD/../.. - -source ${BASE_DIR}/sorc/machine-setup.sh > /dev/null 2>&1 -module use ${BASE_DIR}/modulefiles -module load build.$target.intel -module list - -#------------------------------------- -# Set model resolution. -#------------------------------------- - -export res=384 - -#------------------------------------- -# Where the model "grid", "mosaic" and "oro" files reside. -#------------------------------------- - -export FIX_FV3=${BASE_DIR}/fix/fix_fv3_gmted2010/C${res} - -#------------------------------------- -# Uncomment for regional grids. -#------------------------------------- - -##HALO=3 -##export GRIDTYPE=regional - -#------------------------------------- -# Choose which virrs data to use. 
-#------------------------------------- - -export veg_type_src="viirs.igbp.0.05" # Use global 0.05-degree viirs data -#export veg_type_src="viirs.igbp.0.1" # Use global 0.1-degree viirs data -#export veg_type_src="viirs.igbp.0.03" # Use global 0.03-degree viirs data -#export veg_type_src="viirs.igbp.conus.0.01" # Use CONUS 0.01-degree virrs data. Do not - # use for global grids. - -#------------------------------------- -# Set working directory and directory where output files will be saved. -#------------------------------------- - -export WORK_DIR=/gpfs/dell1/stmp/$LOGNAME/work.sfc -export SAVE_DIR=/gpfs/dell1/stmp/$LOGNAME/sfc.C${res} - -#------------------------------------- -# Should not have to touch anything below here. -#------------------------------------- - -if [[ $GRIDTYPE = "regional" ]]; then - HALO=$(( $HALO + 1 )) - export HALO - ln -fs $FIX_FV3/C${res}_grid.tile7.halo${HALO}.nc $FIX_FV3/C${res}_grid.tile7.nc - ln -fs $FIX_FV3/C${res}_oro_data.tile7.halo${HALO}.nc $FIX_FV3/C${res}_oro_data.tile7.nc -fi - -export input_sfc_climo_dir=${BASE_DIR}/fix/fix_sfc_climo -export APRUN_SFC="mpirun -l" - -ulimit -a -ulimit -s unlimited - -rm -fr $WORK_DIR $SAVE_DIR - -${BASE_DIR}/ush/sfc_climo_gen.sh - -exit From 182bdcb50f688e9fcb9eeb01eb5918f0391ca4d5 Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 07:59:14 -0500 Subject: [PATCH 05/18] Remove wcoss1 scripts from gdas_init utility. Fixes #665. --- util/gdas_init/driver.cray.sh | 188 ------------------------------- util/gdas_init/driver.dell.sh | 206 ---------------------------------- 2 files changed, 394 deletions(-) delete mode 100755 util/gdas_init/driver.cray.sh delete mode 100755 util/gdas_init/driver.dell.sh diff --git a/util/gdas_init/driver.cray.sh b/util/gdas_init/driver.cray.sh deleted file mode 100755 index c7a6a9b97..000000000 --- a/util/gdas_init/driver.cray.sh +++ /dev/null @@ -1,188 +0,0 @@ -#!/bin/bash - -#---------------------------------------------------------------------- -# Driver script for running on Cray. -# -# Edit the 'config' file before running. 
-#---------------------------------------------------------------------- - -set -x - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module list - -PROJECT_CODE=GFS-DEV - -source config - -if [ $EXTRACT_DATA == yes ]; then - - rm -fr $EXTRACT_DIR - mkdir -p $EXTRACT_DIR - - QUEUE=dev_transfer - - MEM=6000 - WALLT="2:00" - - case $gfs_ver in - v12 | v13) - bsub -o log.data.$CDUMP -e log.data.$CDUMP -q $QUEUE -P $PROJECT_CODE -J get.data.$CDUMP -W $WALLT \ - -R "rusage[mem=$MEM]" "./get_pre-v14.data.sh ${CDUMP}" - if [ "$CDUMP" = "gdas" ] ; then - bsub -o log.data.enkf -e log.data.enkf -q $QUEUE -P $PROJECT_CODE -J get.data.enkf -W $WALLT \ - -R "rusage[mem=$MEM]" "./get_pre-v14.data.sh enkf" - fi - DEPEND="-w ended(get.data.*)" - ;; - v14) - bsub -o log.data.$CDUMP -e log.data.$CDUMP -q $QUEUE -P $PROJECT_CODE -J get.data.$CDUMP -W $WALLT \ - -R "rusage[mem=$MEM]" "./get_v14.data.sh ${CDUMP}" - if [ "$CDUMP" = "gdas" ] ; then - bsub -o log.data.enkf -e log.data.enkf -q $QUEUE -P $PROJECT_CODE -J get.data.enkf -W $WALLT \ - -R "rusage[mem=$MEM]" "./get_v14.data.sh enkf" - fi - DEPEND="-w ended(get.data.*)" - ;; - v15) - bsub -o log.data.${CDUMP} -e log.data.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J get.data.${CDUMP} -W $WALLT \ - -R "rusage[mem=$MEM]" "./get_v15.data.sh ${CDUMP}" - if [ "$CDUMP" = "gdas" ] ; then - for group in grp1 grp2 grp3 grp4 grp5 grp6 grp7 grp8 - do - bsub -o log.data.${group} -e log.data.${group} -q $QUEUE -P $PROJECT_CODE -J get.data.${group} -W $WALLT \ - -R "rusage[mem=$MEM]" "./get_v15.data.sh ${group}" - done - fi - DEPEND="-w ended(get.data.*)" - ;; - v16retro) - bsub -o log.data.$CDUMP -e log.data.$CDUMP -q $QUEUE -P $PROJECT_CODE -J get.data.$CDUMP -W $WALLT \ - -R "rusage[mem=$MEM]" "./get_v16retro.data.sh ${CDUMP}" - DEPEND="-w ended(get.data.${CDUMP})" - ;; - v16) - bsub -o log.data.${CDUMP} -e log.data.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J get.data.${CDUMP} -W $WALLT \ - -R "rusage[mem=$MEM]" "./get_v16.data.sh ${CDUMP}" - if [ "$CDUMP" = "gdas" ] ; then - for group in grp1 grp2 grp3 grp4 grp5 grp6 grp7 grp8 - do - bsub -o log.data.${group} -e log.data.${group} -q $QUEUE -P $PROJECT_CODE -J get.data.${group} -W $WALLT \ - -R "rusage[mem=$MEM]" "./get_v16.data.sh ${group}" - done - fi - DEPEND="-w ended(get.data.*)" - ;; - esac - -else - - DEPEND=' ' - -fi - -if [ $RUN_CHGRES == yes ]; then - MEM=2000 - QUEUE=dev - WALLT="0:15" - NUM_NODES=2 - case $gfs_ver in - v12 | v13) - export OMP_NUM_THREADS=2 - export OMP_STACKSIZE=1024M - ;; - *) - export OMP_NUM_THREADS=1 - ;; - esac - export APRUN="aprun -j 1 -n 24 -N 12 -d ${OMP_NUM_THREADS} -cc depth" - if [ $CRES_HIRES == 'C768' ] ; then - WALLT="0:20" - NUM_NODES=3 - export APRUN="aprun -j 1 -n 36 -N 12 -d ${OMP_NUM_THREADS} -cc depth" - elif [ $CRES_HIRES == 'C1152' ] ; then - WALLT="0:20" - NUM_NODES=4 - export APRUN="aprun -j 1 -n 48 -N 12 -d ${OMP_NUM_THREADS} -cc depth" - fi - case $gfs_ver in - v12 | v13) - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -M $MEM -W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_pre-v14.chgres.sh ${CDUMP}" - ;; - v14) - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -M $MEM -W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v14.chgres.sh ${CDUMP}" - ;; - v15) - if [ "$CDUMP" = "gdas" ]; then - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -M $MEM 
-W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v15.chgres.sh ${CDUMP}" - else - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -M $MEM -W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v15.chgres.gfs.sh" - fi - ;; - v16retro) - if [ "$CDUMP" = "gdas" ] ; then - bsub -e log.gdas -o log.gdas -q $QUEUE -P $PROJECT_CODE -J chgres_gdas -M $MEM -W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v16retro.chgres.sh hires" - else - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -M $MEM -W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v16.chgres.sh ${CDUMP}" - fi - ;; - v16) - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -M $MEM -W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v16.chgres.sh ${CDUMP}" - ;; - esac - - if [ "$CDUMP" = 'gdas' ]; then - - WALLT="0:15" - NUM_NODES=1 - export APRUN="aprun -j 1 -n 12 -N 12 -d ${OMP_NUM_THREADS} -cc depth" - - if [ "$gfs_ver" = "v16retro" ]; then - - bsub -e log.enkf -o log.enkf -q $QUEUE -P $PROJECT_CODE -J chgres_enkf -M $MEM -W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v16retro.chgres.sh enkf" - - else - - MEMBER=1 - while [ $MEMBER -le 80 ]; do - if [ $MEMBER -lt 10 ]; then - MEMBER_CH="00${MEMBER}" - else - MEMBER_CH="0${MEMBER}" - fi - case $gfs_ver in - v12 | v13) - bsub -e log.${MEMBER_CH} -o log.${MEMBER_CH} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER_CH} -M $MEM -W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_pre-v14.chgres.sh ${MEMBER_CH}" - ;; - v14) - bsub -e log.${MEMBER_CH} -o log.${MEMBER_CH} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER_CH} -M $MEM -W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v14.chgres.sh ${MEMBER_CH}" - ;; - v15) - bsub -e log.${MEMBER_CH} -o log.${MEMBER_CH} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER_CH} -M $MEM -W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v15.chgres.sh ${MEMBER_CH}" - ;; - v16) - bsub -e log.${MEMBER_CH} -o log.${MEMBER_CH} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER_CH} -M $MEM -W $WALLT \ - -extsched 'CRAYLINUX[]' $DEPEND "export NODES=$NUM_NODES; ./run_v16.chgres.sh ${MEMBER_CH}" - ;; - esac - MEMBER=$(( $MEMBER + 1 )) - done - - fi # is this v16 retro? - - fi - -fi diff --git a/util/gdas_init/driver.dell.sh b/util/gdas_init/driver.dell.sh deleted file mode 100755 index d3804535e..000000000 --- a/util/gdas_init/driver.dell.sh +++ /dev/null @@ -1,206 +0,0 @@ -#!/bin/bash - -#---------------------------------------------------------------------- -# Driver script for running on Dell. -# -# Edit the 'config' file before running. -#---------------------------------------------------------------------- - -set -x - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module list - -PROJECT_CODE=GFS-DEV - -source config - -#---------------------------------------------------------------------- -# Extract data. 
-#---------------------------------------------------------------------- - -if [ "$EXTRACT_DATA" = "yes" ]; then - - rm -fr $EXTRACT_DIR - mkdir -p $EXTRACT_DIR - - QUEUE=dev_transfer - - MEM=6000M - WALLT="2:00" - - case $gfs_ver in - v12 | v13 ) - bsub -o log.data.$CDUMP -e log.data.$CDUMP -q $QUEUE -P $PROJECT_CODE -J get.data.$CDUMP -W $WALLT \ - -R "affinity[core(1)]" -M $MEM "./get_pre-v14.data.sh ${CDUMP}" - if [ "$CDUMP" = "gdas" ] ; then - bsub -o log.data.enkf -e log.data.enkf -q $QUEUE -P $PROJECT_CODE -J get.data.enkf -W $WALLT \ - -R "affinity[core(1)]" -M $MEM "./get_pre-v14.data.sh enkf" - fi - DEPEND="-w ended(get.data.*)" - ;; - v14) - bsub -o log.data.${CDUMP} -e log.data.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J get.data.${CDUMP} -W $WALLT \ - -R "affinity[core(1)]" -M $MEM "./get_v14.data.sh ${CDUMP}" - - if [ "$CDUMP" = "gdas" ] ; then - bsub -o log.data.enkf -e log.data.enkf -q $QUEUE -P $PROJECT_CODE -J get.data.enkf -W $WALLT \ - -R "affinity[core(1)]" -M $MEM "./get_v14.data.sh enkf" - fi - DEPEND="-w ended(get.data.*)" - ;; - v15) - bsub -o log.data.${CDUMP} -e log.data.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J get.data.${CDUMP} -W $WALLT \ - -R "affinity[core(1)]" -M $MEM "./get_v15.data.sh ${CDUMP}" - if [ "$CDUMP" = "gdas" ] ; then - for group in grp1 grp2 grp3 grp4 grp5 grp6 grp7 grp8 - do - bsub -o log.data.enkf.${group} -e log.data.enkf.${group} -q $QUEUE -P $PROJECT_CODE -J get.data.enkf.${group} -W $WALLT \ - -R "affinity[core(1)]" -M $MEM "./get_v15.data.sh ${group}" - done - fi - DEPEND="-w ended(get.data.*)" - ;; - v16retro) - bsub -o log.data.v16retro -e log.data.v16retro -q $QUEUE -P $PROJECT_CODE -J get.data.v16retro -W $WALLT \ - -R "affinity[core(1)]" -M $MEM "./get_v16retro.data.sh ${CDUMP}" - DEPEND="-w ended(get.data.v16retro)" - ;; - v16) - bsub -o log.data.${CDUMP} -e log.data.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J get.data.${CDUMP} -W $WALLT \ - -R "affinity[core(1)]" -M $MEM "./get_v16.data.sh ${CDUMP}" - if [ "$CDUMP" = "gdas" ] ; then - for group in grp1 grp2 grp3 grp4 grp5 grp6 grp7 grp8 - do - bsub -o log.data.enkf.${group} -e log.data.enkf.${group} -q $QUEUE -P $PROJECT_CODE -J get.data.enkf.${group} -W $WALLT \ - -R "affinity[core(1)]" -M $MEM "./get_v16.data.sh ${group}" - done - fi - DEPEND="-w ended(get.data.*)" - ;; - esac - -else # do not extract data. - - DEPEND=' ' - -fi # extract data? - -#---------------------------------------------------------------------- -# Run chgres. 
-#---------------------------------------------------------------------- - -if [ "$RUN_CHGRES" = "yes" ]; then - - QUEUE=dev2 - WALLT="0:15" - export OMP_NUM_THREADS=1 - NODES="-n 18 -R "span[ptile=9]"" - export APRUN="mpirun" - if [ "$CRES_HIRES" = "C768" ] ; then - NODES="-n 24 -R "span[ptile=6]"" - elif [ "$CRES_HIRES" = "C1152" ] ; then - NODES="-n 36 -R "span[ptile=6]"" - WALLT="0:20" - fi - - case $gfs_ver in - v12 | v13) - export OMP_STACKSIZE=1024M - export OMP_NUM_THREADS=2 - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -W $WALLT \ - -x $NODES -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" $DEPEND \ - "./run_pre-v14.chgres.sh ${CDUMP}" - ;; - v14) - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -W $WALLT \ - -x $NODES -R "affinity[core(1):distribute=balance]" $DEPEND \ - "./run_v14.chgres.sh ${CDUMP}" - ;; - v15) - if [ "$CDUMP" = "gdas" ]; then - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -W $WALLT \ - -x $NODES -R "affinity[core(1):distribute=balance]" $DEPEND \ - "./run_v15.chgres.sh ${CDUMP}" - else - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -W $WALLT \ - -x $NODES -R "affinity[core(1):distribute=balance]" $DEPEND \ - "./run_v15.chgres.gfs.sh" - fi - ;; - v16retro) - if [ "$CDUMP" = "gdas" ]; then - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -W $WALLT \ - -x $NODES -R "affinity[core(1):distribute=balance]" $DEPEND \ - "./run_v16retro.chgres.sh hires" - else - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -W $WALLT \ - -x $NODES -R "affinity[core(1):distribute=balance]" $DEPEND \ - "./run_v16.chgres.sh ${CDUMP}" - fi - ;; - v16) - bsub -e log.${CDUMP} -o log.${CDUMP} -q $QUEUE -P $PROJECT_CODE -J chgres_${CDUMP} -W $WALLT \ - -x $NODES -R "affinity[core(1):distribute=balance]" $DEPEND \ - "./run_v16.chgres.sh ${CDUMP}" - ;; - esac - -#---------------------------------------------------------------------- -# If selected, run chgres for enkf members. 
-#---------------------------------------------------------------------- - - if [ "$CDUMP" = "gdas" ]; then - - NODES="-n 18 -R "span[ptile=9]"" - WALLT="0:15" - - if [ "$gfs_ver" = "v16retro" ]; then - - bsub -e log.enkf -o log.enkf -q $QUEUE -P $PROJECT_CODE -J chgres_enkf -W $WALLT \ - -x $NODES -R "affinity[core(1):distribute=balance]" $DEPEND \ - "./run_v16retro.chgres.sh enkf" - - else - - MEMBER=1 - while [ $MEMBER -le 80 ]; do - if [ $MEMBER -lt 10 ]; then - MEMBER_CH="00${MEMBER}" - else - MEMBER_CH="0${MEMBER}" - fi - case $gfs_ver in - v12 | v13) - export OMP_STACKSIZE=1024M - export OMP_NUM_THREADS=2 - bsub -e log.${MEMBER_CH} -o log.${MEMBER_CH} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER_CH} -W $WALLT \ - -x $NODES -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" $DEPEND \ - "./run_pre-v14.chgres.sh ${MEMBER_CH}" - ;; - v14) - bsub -e log.${MEMBER_CH} -o log.${MEMBER_CH} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER_CH} -W $WALLT \ - -x $NODES -R "affinity[core(1):distribute=balance]" $DEPEND \ - "./run_v14.chgres.sh ${MEMBER_CH}" - ;; - v15) - bsub -e log.${MEMBER_CH} -o log.${MEMBER_CH} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER_CH} -W $WALLT \ - -x $NODES -R "affinity[core(1):distribute=balance]" $DEPEND \ - "./run_v15.chgres.sh ${MEMBER_CH}" - ;; - v16) - bsub -e log.${MEMBER_CH} -o log.${MEMBER_CH} -q $QUEUE -P $PROJECT_CODE -J chgres_${MEMBER_CH} -W $WALLT \ - -x $NODES -R "affinity[core(1):distribute=balance]" $DEPEND \ - "./run_v16.chgres.sh ${MEMBER_CH}" - ;; - esac - MEMBER=$(( $MEMBER + 1 )) - done - - fi # is this v16 retro? - - fi # is this gdas? then process enkf. - -fi # run chgres? From 3a9d8750737cb81e6e336ee285cd4d0e23df6a65 Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 08:31:20 -0500 Subject: [PATCH 06/18] Remove references to wcoss1 from the 'build_all.sh' script. Fixes #665. --- build_all.sh | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/build_all.sh b/build_all.sh index 6b5dad10d..9659b9cab 100755 --- a/build_all.sh +++ b/build_all.sh @@ -1,8 +1,7 @@ #! /usr/bin/env bash # -# This build script is only used on NOAA WCOSS systems. All other -# users should set module files as needed, and build directly with -# CMake. +# This build script is only used on officially supported machines. All other +# users should set module files as needed, and build directly with CMake. # # George Gayno @@ -28,9 +27,9 @@ fi # The unit test data download is part of the build system. Not all machines can # access the EMC ftp site, so turn off the build (-DBUILD_TESTING=OFF) of the units tests accordingly. -# Those with access to the EMC ftp site are: Orion, Hera, WCOSS-Cray, WCOSS-Dell. +# Those with access to the EMC ftp site are: Orion and Hera. -if [[ "$target" == "hera" || "$target" == "orion" || "$target" == "wcoss_cray" || "$target" == "wcoss_dell_p3" ]]; then +if [[ "$target" == "hera" || "$target" == "orion" ]]; then CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=../ -DEMC_EXEC_DIR=ON -DBUILD_TESTING=OFF" #CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=../ -DEMC_EXEC_DIR=ON -DBUILD_TESTING=ON" #CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=../ -DEMC_EXEC_DIR=ON -DENABLE_DOCS=ON -DBUILD_TESTING=ON" From 80c7b585bdd04b433a51bf9af99d8fa425115261 Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 08:55:46 -0500 Subject: [PATCH 07/18] Remove wcoss1 references from ./cmake/LibMPI.cmake. Fixes #665. 
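
With the Luna/Surge (wcoss_cray) and Venus/Mars (wcoss_dell_p3) hostname branches removed, those hosts no longer resolve to a platform name in LibMPI.cmake. As build_all.sh now notes, users on machines that are not officially supported should set up their modules by hand and build directly with CMake. The lines below are only a rough sketch of such a build, reusing the flags build_all.sh passes on supported machines; the "build" directory name and the pre-loaded module environment are illustrative and not part of this patch:

    # from the top of the UFS_UTILS clone, with the needed compiler,
    # MPI and NCEPLIBS modules already loaded by hand
    mkdir -p build && cd build
    cmake .. -DCMAKE_INSTALL_PREFIX=../ -DEMC_EXEC_DIR=ON -DBUILD_TESTING=OFF
    make -j 8
    make install
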
--- cmake/LibMPI.cmake | 34 ---------------------------------- 1 file changed, 34 deletions(-) diff --git a/cmake/LibMPI.cmake b/cmake/LibMPI.cmake index 68c20dda6..27c2683be 100644 --- a/cmake/LibMPI.cmake +++ b/cmake/LibMPI.cmake @@ -45,40 +45,6 @@ function (platform_name RETURN_VARIABLE) set (${RETURN_VARIABLE} "hera" PARENT_SCOPE) - # wcoss_cray (Luna) - elseif (SITENAME MATCHES "^llogin1" OR - SITENAME MATCHES "^llogin2" OR - SITENAME MATCHES "^llogin3") - - set (${RETURN_VARIABLE} "wcoss_cray" PARENT_SCOPE) - - # wcoss_cray (Surge) - elseif (SITENAME MATCHES "^slogin1" OR - SITENAME MATCHES "^slogin2" OR - SITENAME MATCHES "^slogin3") - - set (${RETURN_VARIABLE} "wcoss_cray" PARENT_SCOPE) - - # wcoss_dell_p3 (Venus) - elseif (SITENAME MATCHES "^v71a1.ncep.noaa.gov" OR - SITENAME MATCHES "^v71a2.ncep.noaa.gov" OR - SITENAME MATCHES "^v71a3.ncep.noaa.gov" OR - SITENAME MATCHES "^v72a1.ncep.noaa.gov" OR - SITENAME MATCHES "^v72a2.ncep.noaa.gov" OR - SITENAME MATCHES "^v72a3.ncep.noaa.gov") - - set (${RETURN_VARIABLE} "wcoss_dell_p3" PARENT_SCOPE) - - # wcoss_dell_p3 (Mars) - elseif (SITENAME MATCHES "^m71a1.ncep.noaa.gov" OR - SITENAME MATCHES "^m71a2.ncep.noaa.gov" OR - SITENAME MATCHES "^m71a3.ncep.noaa.gov" OR - SITENAME MATCHES "^m72a1.ncep.noaa.gov" OR - SITENAME MATCHES "^m72a2.ncep.noaa.gov" OR - SITENAME MATCHES "^m72a3.ncep.noaa.gov") - - set (${RETURN_VARIABLE} "wcoss_dell_p3" PARENT_SCOPE) - # wcoss2 elseif (SITENAME MATCHES "^along01" OR SITENAME MATCHES "^alogin02") From cea9f01c460c93add60ff725fe47ac836c08285e Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 09:11:16 -0500 Subject: [PATCH 08/18] Remove grid generation scripts for wcoss1. Fixes #665. --- driver_scripts/driver_grid.cray.sh | 157 ----------------------------- driver_scripts/driver_grid.dell.sh | 155 ---------------------------- 2 files changed, 312 deletions(-) delete mode 100755 driver_scripts/driver_grid.cray.sh delete mode 100755 driver_scripts/driver_grid.dell.sh diff --git a/driver_scripts/driver_grid.cray.sh b/driver_scripts/driver_grid.cray.sh deleted file mode 100755 index cb8132535..000000000 --- a/driver_scripts/driver_grid.cray.sh +++ /dev/null @@ -1,157 +0,0 @@ -#!/bin/bash - -#BSUB -L /bin/sh -#BSUB -P GFS-DEV -#BSUB -oo log.grid.%J -#BSUB -eo log.grid.%J -#BSUB -J grid_fv3 -#BSUB -q debug -#BSUB -M 2400 -#BSUB -W 00:30 -#BSUB -extsched 'CRAYLINUX[]' - -#----------------------------------------------------------------------- -# Driver script to create a cubic-sphere based model grid on Cray. -# -# Produces the following files (netcdf, each tile in separate file): -# 1) 'mosaic' and 'grid' files containing lat/lon and other -# records that describe the model grid. -# 2) 'oro' files containing land mask, terrain and gravity -# wave drag fields. -# 3) Surface climo fields, such as soil type, vegetation -# greenness and albedo. -# -# Note: The sfc_climo_gen program only runs with an -# mpi task count that is a multiple of six. This is -# an ESMF library requirement. Large grids may require -# tasks spread across multiple nodes. -# -# To run, do the following: -# -# 1) Set "C" resolution, "res" - Example: res=96. -# 2) Set grid type ("gtype"). 
Valid choices are -# "uniform" - global uniform grid -# "stretch" - global stretched grid -# "nest" - global stretched grid with nest -# "regional_gfdl" - stand-alone gfdl regional grid -# "regional_esg" - stand-alone extended Schmidt gnomonic -# (esg) regional grid -# 3) For "uniform" and "regional_gfdl" grids - to include lake -# fraction and depth, set "add_lake" to true, and the -# "lake_cutoff" value. -# 4) For "stretch" and "nest" grids, set the stretching factor - -# "stretch_fac", and center lat/lon of highest resolution -# tile - "target_lat" and "target_lon". -# 5) For "nest" grids, set the refinement ratio - "refine_ratio", -# the starting/ending i/j index location within the parent -# tile - "istart_nest", "jstart_nest", "iend_nest", "jend_nest" -# 6) For "regional_gfdl" grids, set the "halo". Default is three -# rows/columns. -# 7) For "regional_esg" grids, set center lat/lon of grid, -# - "target_lat/lon" - the i/j dimensions - "i/jdim", the -# x/y grid spacing - "delx/y", and halo. -# 8) Set working directory - TEMP_DIR - and path to the repository -# clone - home_dir. -# 9) Check settings for 'make_gsl_orog' and 'veg_type_src' -# below. -# 10) Submit script: "cat $script | bsub". -# 11) All files will be placed in "out_dir". -# -#----------------------------------------------------------------------- - -source ../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../modulefiles -module load build.$target.intel -module list - -#----------------------------------------------------------------------- -# Set grid specs here. -#----------------------------------------------------------------------- - -export gtype=uniform # 'uniform', 'stretch', 'nest', - # 'regional_gfdl', 'regional_esg' -export make_gsl_orog=false # 'true' if user needs 'oro' files for GSL - # orographic drag suite -export veg_type_src="modis.igbp.0.05" # veg type data. - # For viirs-based vegetation type data, set to: - # 1) "viirs.igbp.0.05" for global 5km data - # 2) "viirs.igbp.0.1" for global 10km data - # 3) "viirs.igbp.0.03" for global 3km data - # 4) "viirs.igbp.conus.0.01" for regional 1km data - # For the modis-based data, set to: - # 1) "modis.igbp.0.05" for global 5km data - # 2) "modis.igbp.0.03" for global 3km data - # 3) "modis.igbp.conus.0.01" for regional 1km data - -if [ $gtype = uniform ]; then - export res=96 - export add_lake=false # Add lake frac and depth to orography data. - export lake_cutoff=0.20 # lake frac < lake_cutoff ignored when add_lake=T -elif [ $gtype = stretch ]; then - export res=96 - export stretch_fac=1.5 # Stretching factor for the grid - export target_lon=-97.5 # Center longitude of the highest resolution tile - export target_lat=35.5 # Center latitude of the highest resolution tile -elif [ $gtype = nest ] || [ $gtype = regional_gfdl ]; then - export add_lake=false # Add lake frac and depth to orography data. 
- export lake_cutoff=0.20 # lake frac < lake_cutoff ignored when add_lake=T - export res=768 - export stretch_fac=1.5 # Stretching factor for the grid - export target_lon=-97.5 # Center longitude of the highest resolution tile - export target_lat=38.5 # Center latitude of the highest resolution tile - export refine_ratio=3 # The refinement ratio - export istart_nest=123 # Starting i-direction index of nest grid in parent tile supergrid - export jstart_nest=331 # Starting j-direction index of nest grid in parent tile supergrid - export iend_nest=1402 # Ending i-direction index of nest grid in parent tile supergrid - export jend_nest=1194 # Ending j-direction index of nest grid in parent tile supergrid - export halo=3 # Lateral boundary halo -elif [ $gtype = regional_esg ] ; then - export res=-999 # equivalent resolution is computed - export target_lon=-97.5 # Center longitude of grid - export target_lat=35.5 # Center latitude of grid - export idim=301 # Dimension of grid in 'i' direction - export jdim=200 # Dimension of grid in 'j' direction - export delx=0.0585 # Grid spacing (in degrees) in the 'i' direction - # on the SUPERGRID (which has twice the resolution of - # the model grid). The physical grid spacing in the 'i' - # direction is related to delx as follows: - # distance = 2*delx*(circumf_Earth/360 deg) - export dely=0.0585 # Grid spacing (in degrees) in the 'j' direction. - export halo=3 # number of row/cols for halo -fi - -#----------------------------------------------------------------------- -# Check paths. -# home_dir - location of repository. -# TEMP_DIR - working directory. -# out_dir - where files will be placed upon completion. -#----------------------------------------------------------------------- - -export home_dir=$LS_SUBCWD/.. -export TEMP_DIR=/gpfs/hps3/stmp/$LOGNAME/fv3_grid.$gtype -export out_dir=/gpfs/hps3/stmp/$LOGNAME/my_grids - -#----------------------------------------------------------------------- -# Should not need to change anything below here. -#----------------------------------------------------------------------- - -export NODES=1 -export APRUN="aprun -n 1 -N 1 -j 1 -d 1 -cc depth" -export APRUN_SFC="aprun -j 1 -n 24 -N 24" -# The orography code is optimized for six threads. -export OMP_NUM_THREADS=6 -export OMP_STACKSIZE=2048m -export KMP_AFFINITY=disabled -export machine=WCOSS_C -export NCDUMP=/gpfs/hps/usrx/local/prod/NetCDF/4.2/intel/sandybridge/bin/ncdump - -ulimit -a -ulimit -s unlimited - -#----------------------------------------------------------------------- -# Start script. -#----------------------------------------------------------------------- - -$home_dir/ush/fv3gfs_driver_grid.sh - -exit diff --git a/driver_scripts/driver_grid.dell.sh b/driver_scripts/driver_grid.dell.sh deleted file mode 100755 index c5b7c2c86..000000000 --- a/driver_scripts/driver_grid.dell.sh +++ /dev/null @@ -1,155 +0,0 @@ -#!/bin/bash - -#BSUB -oo log.grid.%J -#BSUB -eo log.grid.%J -#BSUB -q debug -#BSUB -P GFS-DEV -#BSUB -J grid_fv3 -#BSUB -W 0:30 -#BSUB -x # run not shared -#BSUB -n 24 # total tasks -#BSUB -R span[ptile=24] # tasks per node -#BSUB -R affinity[core(1):distribute=balance] - -#----------------------------------------------------------------------- -# Driver script to create a cubic-sphere based model grid on Dell. -# -# Produces the following files (netcdf, each tile in separate file): -# 1) 'mosaic' and 'grid' files containing lat/lon and other -# records that describe the model grid. 
-# 2) 'oro' files containing land mask, terrain and gravity -# wave drag fields. -# 3) Surface climo fields, such as soil type, vegetation -# greenness and albedo. -# -# Note: The sfc_climo_gen program only runs with an -# mpi task count that is a multiple of six. This is -# an ESMF library requirement. Large grids may require -# tasks spread across multiple nodes. The orography code -# benefits from threads. -# -# To run, do the following: -# -# 1) Set "C" resolution, "res" - Example: res=96. -# 2) Set grid type ("gtype"). Valid choices are -# "uniform" - global uniform grid -# "stretch" - global stretched grid -# "nest" - global stretched grid with nest -# "regional_gfdl" - stand-alone gfdl regional grid -# "regional_esg" - stand-alone extended Schmidt gnomonic -# (esg) regional grid -# 3) For "uniform" and "regional_gfdl" grids - to include lake -# fraction and depth, set "add_lake" to true, and the -# "lake_cutoff" value. -# 4) For "stretch" and "nest" grids, set the stretching factor - -# "stretch_fac", and center lat/lon of highest resolution -# tile - "target_lat" and "target_lon". -# 5) For "nest" grids, set the refinement ratio - "refine_ratio", -# the starting/ending i/j index location within the parent -# tile - "istart_nest", "jstart_nest", "iend_nest", "jend_nest" -# 6) For "regional_gfdl" grids, set the "halo". Default is three -# rows/columns. -# 7) For "regional_esg" grids, set center lat/lon of grid, -# - "target_lat/lon" - the i/j dimensions - "i/jdim", the -# x/y grid spacing - "delx/y", and halo. -# 8) Set working directory - TEMP_DIR - and path to the repository -# clone - home_dir. -# 9) Check settings for 'make_gsl_orog' and 'veg_type_src' -# below. -# 10) Submit script: "cat $script | bsub". -# 11) All files will be placed in "out_dir". -# -#----------------------------------------------------------------------- - -source ../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../modulefiles -module load build.$target.intel -module list - -#----------------------------------------------------------------------- -# Set grid specs here. -#----------------------------------------------------------------------- - -export gtype=uniform # 'uniform', 'stretch', 'nest', - # 'regional_gfdl', 'regional_esg' -export make_gsl_orog=false # 'true' if user needs 'oro' files for GSL - # orographic drag suite -export veg_type_src="modis.igbp.0.05" # veg type data. - # For viirs-based vegetation type data, set to: - # 1) "viirs.igbp.0.05" for global 5km data - # 2) "viirs.igbp.0.1" for global 10km data - # 3) "viirs.igbp.0.03" for global 3km data - # 4) "viirs.igbp.conus.0.01" for regional 1km data - # For the modis-based data, set to: - # 1) "modis.igbp.0.05" for global 5km data - # 2) "modis.igbp.0.03" for global 3km data - # 3) "modis.igbp.conus.0.01" for regional 1km data - -if [ $gtype = uniform ]; then - export res=96 - export add_lake=false # Add lake frac and depth to orography data. - export lake_cutoff=0.20 # lake frac < lake_cutoff ignored when add_lake=T -elif [ $gtype = stretch ]; then - export res=96 - export stretch_fac=1.5 # Stretching factor for the grid - export target_lon=-97.5 # Center longitude of the highest resolution tile - export target_lat=35.5 # Center latitude of the highest resolution tile -elif [ $gtype = nest ] || [ $gtype = regional_gfdl ]; then - export add_lake=false # Add lake frac and depth to orography data. 
- export lake_cutoff=0.20 # lake frac < lake_cutoff ignored when add_lake=T - export res=768 - export stretch_fac=1.5 # Stretching factor for the grid - export target_lon=-97.5 # Center longitude of the highest resolution tile - export target_lat=38.5 # Center latitude of the highest resolution tile - export refine_ratio=3 # The refinement ratio - export istart_nest=123 # Starting i-direction index of nest grid in parent tile supergrid - export jstart_nest=331 # Starting j-direction index of nest grid in parent tile supergrid - export iend_nest=1402 # Ending i-direction index of nest grid in parent tile supergrid - export jend_nest=1194 # Ending j-direction index of nest grid in parent tile supergrid - export halo=3 # Lateral boundary halo -elif [ $gtype = regional_esg ] ; then - export res=-999 # equivalent resolution is computed - export target_lon=-97.5 # Center longitude of grid - export target_lat=35.5 # Center latitude of grid - export idim=301 # Dimension of grid in 'i' direction - export jdim=200 # Dimension of grid in 'j' direction - export delx=0.0585 # Grid spacing (in degrees) in the 'i' direction - # on the SUPERGRID (which has twice the resolution of - # the model grid). The physical grid spacing in the 'i' - # direction is related to delx as follows: - # distance = 2*delx*(circumf_Earth/360 deg) - export dely=0.0585 # Grid spacing (in degrees) in the 'j' direction. - export halo=3 # number of row/cols for halo -fi - -#----------------------------------------------------------------------- -# Check paths. -# home_dir - location of repository. -# TEMP_DIR - working directory. -# out_dir - where files will be placed upon completion. -#----------------------------------------------------------------------- - -export home_dir=$LS_SUBCWD/.. -export TEMP_DIR=/gpfs/dell1/stmp/$LOGNAME/fv3_grid.$gtype -export out_dir=/gpfs/dell1/stmp/$LOGNAME/my_grids - -#----------------------------------------------------------------------- -# Should not need to change anything below here. -#----------------------------------------------------------------------- - -export APRUN=time -export APRUN_SFC="mpirun -l" -export OMP_NUM_THREADS=24 # orog code worked best with 24 threads. -export OMP_STACKSIZE=2048m -export machine=WCOSS_DELL_P3 - -ulimit -a -ulimit -s unlimited - -#----------------------------------------------------------------------- -# Start script. -#----------------------------------------------------------------------- - -$home_dir/ush/fv3gfs_driver_grid.sh - -exit From 8feb989fb7ffaadf4e57684b781b2f26a3a7134a Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 09:18:52 -0500 Subject: [PATCH 09/18] Delete wcoss1 build modules. Fixes #665. 
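
With these two files deleted, ./modulefiles no longer carries a WCOSS-1 build environment. The way the driver and regression-test scripts in this repository load the build environment on the remaining machines is unchanged; a minimal sketch, with paths written relative to the top of the clone (the scripts themselves source the same files relative to their own directories):

    source ./sorc/machine-setup.sh > /dev/null 2>&1   # sets $target for the current machine
    module use ./modulefiles
    module load build.$target.intel
    module list
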
--- modulefiles/build.wcoss_cray.intel | 39 ------------ modulefiles/build.wcoss_dell_p3.intel.lua | 72 ----------------------- 2 files changed, 111 deletions(-) delete mode 100644 modulefiles/build.wcoss_cray.intel delete mode 100644 modulefiles/build.wcoss_dell_p3.intel.lua diff --git a/modulefiles/build.wcoss_cray.intel b/modulefiles/build.wcoss_cray.intel deleted file mode 100644 index d2c7c438e..000000000 --- a/modulefiles/build.wcoss_cray.intel +++ /dev/null @@ -1,39 +0,0 @@ -#%Module##################################################### -## Build and run module for WCOSS-Cray -############################################################# - -module load prod_util/1.1.0 -module load hpss/4.1.0.3 -module load xt-lsfhpc/9.1.3 -module load cfp-intel-sandybridge/1.1.0 -module load cmake/3.16.2 -module load PrgEnv-intel/5.2.56 -module rm intel -module load intel/18.1.163 -module load cray-mpich/7.2.0 -module load craype-haswell -module load alps/5.2.4-2.0502.9822.32.1.ari -module load cray-netcdf/4.3.3.1 -module load cray-hdf5/1.8.14 - -module use /usrx/local/nceplibs/NCEPLIBS/cmake/install/NCEPLIBS-v1.3.0/modules -module load bacio/2.4.1 -module load ip/3.3.3 -module load nemsio/2.5.2 -module load sp/2.3.3 -module load w3nco/2.4.1 -module load sfcio/1.4.1 -module load sigio/2.3.2 - -setenv ZLIB_ROOT /usrx/local/prod/zlib/1.2.7/intel/haswell -setenv PNG_ROOT /usrx/local/prod/png/1.2.49/intel/haswell -setenv Jasper_ROOT /usrx/local/prod/jasper/1.900.1/intel/haswell - -module use /usrx/local/nceplibs/NCEPLIBS/cmake/install/NCEPLIBS-v1.4.0/modules -module load g2/3.4.5 - -module load esmf/820 - -setenv NETCDF /opt/cray/netcdf/4.3.3.1/INTEL/14.0 -module rm gcc -module load gcc/6.3.0 diff --git a/modulefiles/build.wcoss_dell_p3.intel.lua b/modulefiles/build.wcoss_dell_p3.intel.lua deleted file mode 100644 index 9d7408bc8..000000000 --- a/modulefiles/build.wcoss_dell_p3.intel.lua +++ /dev/null @@ -1,72 +0,0 @@ -help([[ -Load environment to compile UFS_UTILS on WCOSS-Dell P3 -]]) - -lsf_ver=os.getenv("lsf_ver") or "10.1" -load(pathJoin("lsf", lsf_ver)) - -HPSS_ver=os.getenv("HPSS_ver") or "5.0.2.5" -load(pathJoin("HPSS", HPSS_ver)) - -cmake_ver=os.getenv("cmake_ver") or "3.16.2" -load(pathJoin("cmake", cmake_ver)) - -prepend_path("MODULEPATH", "/usrx/local/nceplibs/dev/hpc-stack/libs/hpc-stack/modulefiles/stack") - -hpc_ver=os.getenv("hpc_ver") or "1.1.0" -load(pathJoin("hpc", hpc_ver)) - -ips_ver=os.getenv("ips_ver") or "18.0.5.274" -load(pathJoin("hpc-ips", ips_ver)) - -impi_ver=os.getenv("impi_ver") or "18.0.1" -load(pathJoin("hpc-impi", impi_ver)) - -zlib_ver=os.getenv("zlib_ver") or "1.2.11" -load(pathJoin("zlib", zlib_ver)) - -png_ver=os.getenv("png_ver") or "1.6.35" -load(pathJoin("png", png_ver)) - -hdf5_ver=os.getenv("hdf5_ver") or "1.10.6" -load(pathJoin("hdf5", hdf5_ver)) - -netcdf_ver=os.getenv("netcdf_ver") or "4.7.4" -load(pathJoin("netcdf", netcdf_ver)) - -nccmp_ver=os.getenv("nccmp_ver") or "1.8.9.0" -load(pathJoin("nccmp", nccmp_ver)) - -esmf_ver=os.getenv("esmf_ver") or "8_2_0" -load(pathJoin("esmf", esmf_ver)) - -bacio_ver=os.getenv("bacio_ver") or "2.4.1" -load(pathJoin("bacio", bacio_ver)) - -g2_ver=os.getenv("g2_ver") or "3.4.5" -load(pathJoin("g2", g2_ver)) - -ip_ver=os.getenv("ip_ver") or "3.3.3" -load(pathJoin("ip", ip_ver)) - -nemsio_ver=os.getenv("nemsio_ver") or "2.5.2" -load(pathJoin("nemsio", nemsio_ver)) - -sp_ver=os.getenv("sp_ver") or "2.3.3" -load(pathJoin("sp", sp_ver)) - -w3nco_ver=os.getenv("w3nco_ver") or "2.4.1" -load(pathJoin("w3nco", w3nco_ver)) - 
-sfcio_ver=os.getenv("sfcio_ver") or "1.4.1" -load(pathJoin("sfcio", sfcio_ver)) - -sigio_ver=os.getenv("sigio_ver") or "2.3.2" -load(pathJoin("sigio", sigio_ver)) - -prepend_path("MODULEPATH", "/usrx/local/dev/modulefiles") - -prod_util_ver=os.getenv("prod_util_ver") or "1.1.3" -load(pathJoin("prod_util", prod_util_ver)) - -whatis("Description: UFS_UTILS build environment") From 9bb7af122635c16224f3f053d5649670cb85092f Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 09:27:37 -0500 Subject: [PATCH 10/18] Remove wcoss1 references from chgres_cube consistency tests. Fixes #665. --- reg_tests/chgres_cube/README | 2 - reg_tests/chgres_cube/driver.wcoss_cray.sh | 230 ------------------ reg_tests/chgres_cube/driver.wcoss_dell_p3.sh | 216 ---------------- 3 files changed, 448 deletions(-) delete mode 100755 reg_tests/chgres_cube/driver.wcoss_cray.sh delete mode 100755 reg_tests/chgres_cube/driver.wcoss_dell_p3.sh diff --git a/reg_tests/chgres_cube/README b/reg_tests/chgres_cube/README index 424accd4b..ae25bf871 100644 --- a/reg_tests/chgres_cube/README +++ b/reg_tests/chgres_cube/README @@ -10,8 +10,6 @@ To run the consistency tests: 2) Invoke driver script for your machine. See script prolog for details. Supported machines are: - Hera (driver.hera.sh) - - WCOSS-Cray (driver.cray.sh) - - WCOSS-Dell (driver.dell.sh) - Jet (driver.jet.sh) - Orion (driver.orion.sh) diff --git a/reg_tests/chgres_cube/driver.wcoss_cray.sh b/reg_tests/chgres_cube/driver.wcoss_cray.sh deleted file mode 100755 index 96337cc03..000000000 --- a/reg_tests/chgres_cube/driver.wcoss_cray.sh +++ /dev/null @@ -1,230 +0,0 @@ -#!/bin/bash - -#----------------------------------------------------------------------------- -# -# Run the chgres_cube consistency tests on WCOSS-Cray. -# -# Set WORK_DIR to a general working location outside the UFS_UTILS directory. -# The exact working directory (OUTDIR) will be WORK_DIR/reg_tests/chgres-cube. -# Set the PROJECT_CODE and QUEUE as appropriate. -# -# Invoke the script with no arguments. A series of daily- -# chained jobs will be submitted. To check the queue, type: "bjobs". -# -# The run output will be stored in OUTDIR. Log output from the suite -# will be in LOG_FILE. Once the suite has completed, a summary is -# placed in SUM_FILE. -# -# A test fails when its output does not match the baseline files as -# determined by the "nccmp" utility. The baseline files are stored in -# HOMEreg. -# -#----------------------------------------------------------------------------- - -set -x - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module list - -export OUTDIR="${WORK_DIR:-/gpfs/hps3/stmp/$LOGNAME}" -export OUTDIR="${OUTDIR}/reg-tests/chgres-cube" - -QUEUE="${QUEUE:-dev}" -PROJECT_CODE="${PROJECT_CODE:-GFS-DEV}" - -#----------------------------------------------------------------------------- -# Should not have to change anything below here. HOMEufs is the root -# directory of your UFS_UTILS clone. HOMEreg contains the input data -# and baseline data for each test. -#----------------------------------------------------------------------------- - -export HOMEufs=$PWD/../.. 
- -export UPDATE_BASELINE="FALSE" -#export UPDATE_BASELINE="TRUE" - -if [ "$UPDATE_BASELINE" = "TRUE" ]; then - source ../get_hash.sh -fi - -export HOMEreg=/gpfs/hps3/emc/global/noscrub/George.Gayno/ufs_utils.git/reg_tests/chgres_cube - -LOG_FILE=consistency.log -SUM_FILE=summary.log -rm -f ${LOG_FILE}* $SUM_FILE - -export NCCMP=/gpfs/hps3/emc/global/noscrub/George.Gayno/util/netcdf/nccmp - -export OMP_STACKSIZE=1024M - -export KMP_AFFINITY=disabled - -#----------------------------------------------------------------------------- -# Initialize CONUS 25-KM USING GFS GRIB2 files. -#----------------------------------------------------------------------------- - -LOG_FILE1=${LOG_FILE}01 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE1 -o $LOG_FILE1 -q $QUEUE -P $PROJECT_CODE -J chgres01 -M 1000 -W 0:05 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/25km.conus.gfs.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 3-KM USING HRRR GRIB2 file WITH GFS PHYSICS. -#----------------------------------------------------------------------------- - -LOG_FILE2=${LOG_FILE}02 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE2 -o $LOG_FILE2 -q $QUEUE -P $PROJECT_CODE -J chgres02 -M 1000 -W 0:07 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/3km.conus.hrrr.gfssdf.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 3-KM USING HRRR GRIB2 file WITH GSD PHYSICS AND SFC VARS FROM FILE. -#----------------------------------------------------------------------------- - -LOG_FILE3=${LOG_FILE}03 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 12 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE3 -o $LOG_FILE3 -q $QUEUE -P $PROJECT_CODE -J chgres03 -M 1000 -W 0:07 -extsched 'CRAYLINUX[]' \ - "export NODES=2; $PWD/3km.conus.hrrr.newsfc.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 13-KM USING NAM GRIB2 file WITH GFS PHYSICS . -#----------------------------------------------------------------------------- - -LOG_FILE4=${LOG_FILE}04 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE4 -o $LOG_FILE4 -q $QUEUE -P $PROJECT_CODE -J chgres04 -M 1000 -W 0:07 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/13km.conus.nam.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 13-KM USING RAP GRIB2 file WITH GSD PHYSICS . -#----------------------------------------------------------------------------- - -LOG_FILE5=${LOG_FILE}05 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE5 -o $LOG_FILE5 -q $QUEUE -P $PROJECT_CODE -J chgres05 -M 1000 -W 0:07 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/13km.conus.rap.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 13-KM NA USING NCEI GFS GRIB2 file WITH GFS PHYSICS . 
-#----------------------------------------------------------------------------- - -LOG_FILE6=${LOG_FILE}06 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE6 -o $LOG_FILE6 -q $QUEUE -P $PROJECT_CODE -J chgres06 -M 1000 -W 0:07 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/13km.na.gfs.ncei.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 using FV3 warm restart files. -#----------------------------------------------------------------------------- - -LOG_FILE7=${LOG_FILE}07 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE7 -o $LOG_FILE7 -q $QUEUE -P $PROJECT_CODE -J chgres07 -M 1000 -W 0:15 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/c96.fv3.restart.sh" - -#----------------------------------------------------------------------------- -# Initialize C192 using FV3 tiled history files. -#----------------------------------------------------------------------------- - -LOG_FILE8=${LOG_FILE}08 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE8 -o $LOG_FILE8 -q $QUEUE -P $PROJECT_CODE -J chgres08 -M 1000 -W 0:15 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/c192.fv3.history.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 using FV3 gaussian nemsio files. -#----------------------------------------------------------------------------- - -LOG_FILE9=${LOG_FILE}09 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE9 -o $LOG_FILE9 -q $QUEUE -P $PROJECT_CODE -J chgres09 -M 1000 -W 0:15 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/c96.fv3.nemsio.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 using spectral GFS sigio/sfcio files. -#----------------------------------------------------------------------------- - -LOG_FILE10=${LOG_FILE}10 -export OMP_NUM_THREADS=4 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE10 -o $LOG_FILE10 -q $QUEUE -P $PROJECT_CODE -J chgres10 -M 1000 -W 0:15 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/c96.gfs.sigio.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 using spectral GFS gaussian nemsio files. -#----------------------------------------------------------------------------- - -LOG_FILE11=${LOG_FILE}11 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE11 -o $LOG_FILE11 -q $QUEUE -P $PROJECT_CODE -J chgres11 -M 1000 -W 0:15 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/c96.gfs.nemsio.sh" - -#----------------------------------------------------------------------------- -# Initialize regional C96 using FV3 gaussian nemsio files. -#----------------------------------------------------------------------------- - -LOG_FILE12=${LOG_FILE}12 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE12 -o $LOG_FILE12 -q $QUEUE -P $PROJECT_CODE -J chgres12 -M 1000 -W 0:15 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/c96.regional.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 using FV3 gaussian netcdf files. 
-#----------------------------------------------------------------------------- - -LOG_FILE13=${LOG_FILE}13 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 12 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE13 -o $LOG_FILE13 -q $QUEUE -P $PROJECT_CODE -J chgres13 -M 1000 -W 0:15 -extsched 'CRAYLINUX[]' \ - "export NODES=2; $PWD/c96.fv3.netcdf.sh" - -#----------------------------------------------------------------------------- -# Initialize global C192 using GFS GRIB2 data. -#----------------------------------------------------------------------------- - -LOG_FILE14=${LOG_FILE}14 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE14 -o $LOG_FILE14 -q $QUEUE -P $PROJECT_CODE -J chgres14 -M 1000 -W 0:05 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/c192.gfs.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 WAM IC using FV3 gaussian netcdf files. -#----------------------------------------------------------------------------- - -LOG_FILE15=${LOG_FILE}15 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 12 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE15 -o $LOG_FILE15 -q $QUEUE -P $PROJECT_CODE -J chgres15 -M 1000 -W 0:15 -extsched 'CRAYLINUX[]' \ - "export NODES=2; $PWD/c96.fv3.netcdf2wam.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 25-KM USING GFS PGRIB2+BGRIB2 files. -#----------------------------------------------------------------------------- - -LOG_FILE16=${LOG_FILE}16 -export OMP_NUM_THREADS=1 -export APRUN="aprun -j 1 -n 6 -N 6 -d ${OMP_NUM_THREADS} -cc depth" -bsub -e $LOG_FILE16 -o $LOG_FILE16 -q $QUEUE -P $PROJECT_CODE -J chgres16 -M 1000 -W 0:05 -extsched 'CRAYLINUX[]' \ - "export NODES=1; $PWD/25km.conus.gfs.pbgrib2.sh" - -#----------------------------------------------------------------------------- -# Create summary log. -#----------------------------------------------------------------------------- - -bsub -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J summary -R "rusage[mem=100]" -W 0:01 -w 'ended(chgres*)' "grep -a '<<<' "*.log*" >> $SUM_FILE" - -exit diff --git a/reg_tests/chgres_cube/driver.wcoss_dell_p3.sh b/reg_tests/chgres_cube/driver.wcoss_dell_p3.sh deleted file mode 100755 index 6f4e8e821..000000000 --- a/reg_tests/chgres_cube/driver.wcoss_dell_p3.sh +++ /dev/null @@ -1,216 +0,0 @@ -#!/bin/bash - -#----------------------------------------------------------------------------- -# -# Run the chgres_cube consistency tests on WCOSS-Dell. -# -# Set WORK_DIR to a general working location outside the UFS_UTILS directory. -# The exact working directory (OUTDIR) will be WORK_DIR/reg_tests/chgres-cube. -# Set the PROJECT_CODE and QUEUE as appropriate. -# -# Invoke the script as follows with no arguments. A series of daily- -# chained jobs will be submitted. To check the queue, type: "bjobs". -# -# The run output will be stored in OUTDIR. Log output from the suite -# will be in LOG_FILE. Once the suite has completed, a summary is -# placed in SUM_FILE. -# -# A test fails when its output does not match the baseline files as -# determined by the "nccmp" utility. The baseline files are stored in -# HOMEreg. 
-# -#----------------------------------------------------------------------------- - -set -x - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module list - -export OUTDIR="${WORK_DIR:-/gpfs/dell1/stmp/$LOGNAME}" -export OUTDIR="${OUTDIR}/reg-tests/chgres-cube" - -QUEUE="${QUEUE:-dev}" -PROJECT_CODE="${PROJECT_CODE:-GFS-DEV}" - -#----------------------------------------------------------------------------- -# Should not have to change anything below here. HOMEufs is the root -# directory of your UFS_UTILS clone. HOMEreg contains the input data -# and baseline data for each test. -#----------------------------------------------------------------------------- - -export UPDATE_BASELINE="FALSE" -#export UPDATE_BASELINE="TRUE" - -if [ "$UPDATE_BASELINE" = "TRUE" ]; then - source ../get_hash.sh -fi - -export HOMEufs=$PWD/../.. - -export HOMEreg=/gpfs/dell2/emc/modeling/noscrub/George.Gayno/ufs_utils.git/reg_tests/chgres_cube - -SUM_FILE=summary.log - -rm -f $SUM_FILE consistency.log?? - -export OMP_STACKSIZE=1024M - -export APRUN=mpirun - -export NCCMP=${NCCMP:-nccmp} - -#----------------------------------------------------------------------------- -# Initialize CONUS 25-KM USING GFS GRIB2 files. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log01 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres01 -W 0:05 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/25km.conus.gfs.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 3-KM USING HRRR GRIB2 file WITH GFS PHYSICS. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log02 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres02 -W 0:10 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/3km.conus.hrrr.gfssdf.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 3-KM USING HRRR GRIB2 file WITH GSD PHYSICS AND SFC VARS FROM FILE. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log03 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres03 -W 0:10 -x -n 12 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/3km.conus.hrrr.newsfc.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 13-KM USING NAM GRIB2 file WITH GFS PHYSICS . -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log04 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres04 -W 0:05 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/13km.conus.nam.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 13-KM USING RAP GRIB2 file WITH GSD PHYSICS . 
-#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log05 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres05 -W 0:10 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/13km.conus.rap.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 13-KM NA USING NCEI GFS GRIB2 file WITH GFS PHYSICS . -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log06 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres06 -W 0:05 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/13km.na.gfs.ncei.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 using FV3 warm restart files. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log07 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres07 -W 0:15 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/c96.fv3.restart.sh" - -#----------------------------------------------------------------------------- -# Initialize C192 using FV3 tiled history files. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log08 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres08 -W 0:15 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/c192.fv3.history.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 using FV3 gaussian nemsio files. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log09 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres09 -W 0:15 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/c96.fv3.nemsio.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 using spectral GFS sigio/sfcio files. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log10 -export OMP_NUM_THREADS=4 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres10 -W 0:15 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/c96.gfs.sigio.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 using spectral GFS gaussian nemsio files. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log11 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres11 -W 0:15 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/c96.gfs.nemsio.sh" - -#----------------------------------------------------------------------------- -# Initialize regional C96 using FV3 gaussian nemsio files. 
-#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log12 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres12 -W 0:15 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/c96.regional.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 using FV3 gaussian netcdf files. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log13 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres13 -W 0:15 -x -n 12 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/c96.fv3.netcdf.sh" - -#----------------------------------------------------------------------------- -# Initialize global C192 using GFS GRIB2 file. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log14 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres14 -W 0:05 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/c192.gfs.grib2.sh" - -#----------------------------------------------------------------------------- -# Initialize C96 WAM IC using FV3 gaussian netcdf files. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log15 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres15 -W 0:15 -x -n 12 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/c96.fv3.netcdf2wam.sh" - -#----------------------------------------------------------------------------- -# Initialize CONUS 25-KM USING GFS PGRIB2+BGRIB2 files. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log16 -export OMP_NUM_THREADS=1 -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J chgres16 -W 0:05 -x -n 6 \ - -R "span[ptile=6]" -R "affinity[core(${OMP_NUM_THREADS}):distribute=balance]" "$PWD/25km.conus.gfs.pbgrib2.sh" - -#----------------------------------------------------------------------------- -# Create summary log. -#----------------------------------------------------------------------------- - -LOG_FILE=consistency.log -bsub -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J summary -R "affinity[core(1)]" -R "rusage[mem=100]" -W 0:01 \ - -w 'ended(chgres*)' "grep -a '<<<' "*.log*" >> $SUM_FILE" - -exit From 59a1b7148bc1f36ac13a95beb3e2602e55f1f238 Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 09:34:09 -0500 Subject: [PATCH 11/18] Remove global_cycle consistency scripts for wcoss1. Fixes #665. --- reg_tests/global_cycle/driver.wcoss_cray.sh | 82 ------------------- .../global_cycle/driver.wcoss_dell_p3.sh | 78 ------------------ 2 files changed, 160 deletions(-) delete mode 100755 reg_tests/global_cycle/driver.wcoss_cray.sh delete mode 100755 reg_tests/global_cycle/driver.wcoss_dell_p3.sh diff --git a/reg_tests/global_cycle/driver.wcoss_cray.sh b/reg_tests/global_cycle/driver.wcoss_cray.sh deleted file mode 100755 index 4c98a52d3..000000000 --- a/reg_tests/global_cycle/driver.wcoss_cray.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/bash - -#----------------------------------------------------------------------------- -# -# Run global_cycle consistency tests on WCOSS-Cray. -# -# Set $WORK_DIR to your working directory. 
Set the project code nd -# and queue as appropriate. -# -# Invoke the script as follows: ./$script -# -# Log output is placed in consistency.log??. A summary is -# placed in summary.log -# -# A test fails when its output does not match the baseline files -# as determined by the 'nccmp' utility. This baseline files are -# stored in HOMEreg. -# -#----------------------------------------------------------------------------- - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module list - -WORK_DIR="${WORK_DIR:-/gpfs/hps3/stmp/$LOGNAME}" - -PROJECT_CODE="${PROJECT_CODE:-GDAS-T2O}" -QUEUE="${QUEUE:-dev}" - -#----------------------------------------------------------------------------- -# Should not have to change anything below. -#----------------------------------------------------------------------------- - -export UPDATE_BASELINE="FALSE" -#export UPDATE_BASELINE="TRUE" - -if [ "$UPDATE_BASELINE" = "TRUE" ]; then - source ../get_hash.sh -fi - -DATA_DIR="${WORK_DIR}/reg-tests/global-cycle" - -export HOMEreg=/gpfs/hps3/emc/global/noscrub/George.Gayno/ufs_utils.git/reg_tests/global_cycle - -export OMP_NUM_THREADS_CY=4 - -export KMP_AFFINITY=disabled - -export APRUNCY="aprun -n 6 -N 6 -j 1 -d $OMP_NUM_THREADS_CY -cc depth" - -export NWPROD=$PWD/../.. - -export COMOUT=$DATA - -export NCCMP=/gpfs/hps3/emc/global/noscrub/George.Gayno/util/netcdf/nccmp - -reg_dir=$PWD - -LOG_FILE=consistency.log01 -export DATA="${DATA_DIR}/test1" -export COMOUT=$DATA -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J c768.fv3gfs -M 2400 -W 0:05 \ - -extsched 'CRAYLINUX[]' "export NODES=1; $PWD/C768.fv3gfs.sh" - -LOG_FILE=consistency.log02 -export DATA="${DATA_DIR}/test2" -export COMOUT=$DATA -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J c768.lndincsoil -M 2400 -W 0:05 \ - -extsched 'CRAYLINUX[]' "export NODES=1; $PWD/C768.lndincsoil.sh" - -LOG_FILE=consistency.log03 -export DATA="${DATA_DIR}/test3" -export COMOUT=$DATA -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J c768.lndincsnow -M 2400 -W 0:05 \ - -extsched 'CRAYLINUX[]' "export NODES=1; $PWD/C768.lndincsnow.sh" - -LOG_FILE=consistency.log -bsub -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J summary -R "rusage[mem=100]" -W 0:01 \ - -w 'ended(c768.*)' "grep -a '<<<' "${LOG_FILE}*" >> summary.log" - -exit diff --git a/reg_tests/global_cycle/driver.wcoss_dell_p3.sh b/reg_tests/global_cycle/driver.wcoss_dell_p3.sh deleted file mode 100755 index 9b1c0cc10..000000000 --- a/reg_tests/global_cycle/driver.wcoss_dell_p3.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash - -#----------------------------------------------------------------------------- -# -# Run global_cycle consistency tests on WCOSS-Dell. -# -# Set $WORK_DIR to your working directory. Set the project code -# and queue as appropriate. -# -# Invoke the script from the command line as follows: ./$script -# -# Log output is placed in consistency.log??. A summary is -# placed in summary.log. -# -# A test fails when its output does not match the baseline files -# as determined by the 'nccmp' utility. This baseline files are -# stored in HOMEreg. 
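# A minimal sketch of the kind of nccmp check these consistency tests rely on.
# File names and messages are placeholders, and the exact nccmp options used by
# the real test scripts may differ:
#
nccmp -d -m -q $DATA/sfc_data.tile1.nc $HOMEreg/baseline/sfc_data.tile1.nc
if [ $? -ne 0 ]; then
  echo "<<< GLOBAL_CYCLE TEST FAILED."
else
  echo "<<< GLOBAL_CYCLE TEST PASSED."
fi
#
# The '<<<' marker lines are what the summary job greps into summary.log.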
-# -#----------------------------------------------------------------------------- - -set -x - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module list - -WORK_DIR="${WORK_DIR:-/gpfs/dell1/stmp/$LOGNAME}" - -PROJECT_CODE="${PROJECT_CODE:-GFS-DEV}" -QUEUE="${QUEUE:-dev}" - -#----------------------------------------------------------------------------- -# Should not have to change anything below. -#----------------------------------------------------------------------------- - -export UPDATE_BASELINE="FALSE" -#export UPDATE_BASELINE="TRUE" - -if [ "$UPDATE_BASELINE" = "TRUE" ]; then - source ../get_hash.sh -fi - -DATA_DIR="${WORK_DIR}/reg-tests/global-cycle" - -export HOMEreg=/gpfs/dell2/emc/modeling/noscrub/George.Gayno/ufs_utils.git/reg_tests/global_cycle - -export OMP_NUM_THREADS_CY=2 - -export APRUNCY="mpirun -l" - -export NWPROD=$PWD/../.. - -reg_dir=$PWD - -LOG_FILE=consistency.log01 -export DATA="${DATA_DIR}/test1" -export COMOUT=$DATA -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J c768.fv3gfs -W 0:05 -x -n 6 \ - -M 2400 -R "span[ptile=6]" -R "affinity[core(1)]" "$PWD/C768.fv3gfs.sh" - -LOG_FILE=consistency.log02 -export DATA="${DATA_DIR}/test2" -export COMOUT=$DATA -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J c768.lndincsoil -W 0:05 -x -n 6 \ - -M 2400 -R "span[ptile=6]" -R "affinity[core(1)]" "$PWD/C768.lndincsoil.sh" - -LOG_FILE=consistency.log03 -export DATA="${DATA_DIR}/test3" -export COMOUT=$DATA -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J c768.lndincsnow -W 0:05 -x -n 6 \ - -M 2400 -R "span[ptile=6]" -R "affinity[core(1)]" "$PWD/C768.lndincsnow.sh" - -LOG_FILE=consistency.log -bsub -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J summary -R "affinity[core(1)]" -R "rusage[mem=100]" -W 0:01 \ - -w 'ended(c768.*)' "grep -a '<<<' "${LOG_FILE}*" >> summary.log" - -exit From a07fdf117a121f429bd099357700f02ef0ce2a77 Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 09:43:20 -0500 Subject: [PATCH 12/18] Remove wcoss1 grid_gen consistency test scripts. Fixes #665. --- reg_tests/grid_gen/driver.wcoss_cray.sh | 104 --------------------- reg_tests/grid_gen/driver.wcoss_dell_p3.sh | 100 -------------------- 2 files changed, 204 deletions(-) delete mode 100755 reg_tests/grid_gen/driver.wcoss_cray.sh delete mode 100755 reg_tests/grid_gen/driver.wcoss_dell_p3.sh diff --git a/reg_tests/grid_gen/driver.wcoss_cray.sh b/reg_tests/grid_gen/driver.wcoss_cray.sh deleted file mode 100755 index de85aade2..000000000 --- a/reg_tests/grid_gen/driver.wcoss_cray.sh +++ /dev/null @@ -1,104 +0,0 @@ -#!/bin/bash - -#----------------------------------------------------------------------------- -# -# Run grid generation consistency tests on WCOSS-Cray. -# -# Set WORK_DIR to your working directory. Set the PROJECT_CODE and QUEUE -# as appropriate. -# -# Invoke the script with no arguments. A series of daily- -# chained jobs will be submitted. To check the queue, type: "bjobs". -# -# Log output from the suite will be in LOG_FILE. Once the suite -# has completed, a summary is placed in SUM_FILE. -# -# A test fails when its output does not match the baseline files as -# determined by the "nccmp" utility. 
The baseline files are stored in -# HOMEreg -# -#----------------------------------------------------------------------------- - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module list - -set -x - -QUEUE="${QUEUE:-debug}" -PROJECT_CODE="${PROJECT_CODE:-GFS-DEV}" -export WORK_DIR="${WORK_DIR:-/gpfs/hps3/stmp/$LOGNAME}" -export WORK_DIR="${WORK_DIR}/reg-tests/grid-gen" - -#----------------------------------------------------------------------------- -# Should not have to change anything below here. -#----------------------------------------------------------------------------- - -export UPDATE_BASELINE="FALSE" -#export UPDATE_BASELINE="TRUE" - -if [ "$UPDATE_BASELINE" = "TRUE" ]; then - source ../get_hash.sh -fi - -export home_dir=$PWD/../.. -LOG_FILE=consistency.log -SUM_FILE=summary.log -export APRUN="aprun -n 1 -N 1 -j 1 -d 1 -cc depth" -export APRUN_SFC="aprun -j 1 -n 24 -N 24" -export OMP_STACKSIZE=2048m -export OMP_NUM_THREADS=6 -export machine=WCOSS_C -export KMP_AFFINITY=disabled -export NCCMP=/gpfs/hps3/emc/global/noscrub/George.Gayno/util/netcdf/nccmp -export HOMEreg=/gpfs/hps3/emc/global/noscrub/George.Gayno/ufs_utils.git/reg_tests/grid_gen/baseline_data -export NCDUMP=/gpfs/hps/usrx/local/prod/NetCDF/4.2/intel/sandybridge/bin/ncdump - -rm -fr $WORK_DIR - -ulimit -a -ulimit -s unlimited - -#----------------------------------------------------------------------------- -# C96 uniform grid -#----------------------------------------------------------------------------- - -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J c96.uniform -W 0:15 -M 2400 \ - -extsched 'CRAYLINUX[]' "export NODES=1; $PWD/c96.uniform.sh" - -#----------------------------------------------------------------------------- -# C96 uniform grid using viirs vegetation type data. -#----------------------------------------------------------------------------- - -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J c96.viirs.vegt -W 0:15 -M 2400 \ - -w 'ended(c96.uniform)' -extsched 'CRAYLINUX[]' "export NODES=1; $PWD/c96.viirs.vegt.sh" - -#----------------------------------------------------------------------------- -# gfdl regional grid -#----------------------------------------------------------------------------- - -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J gfdl.regional -W 0:10 -M 2400 \ - -w 'ended(c96.viirs.vegt)' -extsched 'CRAYLINUX[]' "export NODES=1; $PWD/gfdl.regional.sh" - -#----------------------------------------------------------------------------- -# ESG regional grid -#----------------------------------------------------------------------------- - -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J esg.regional -W 0:10 -M 2400 \ - -w 'ended(gfdl.regional)' -extsched 'CRAYLINUX[]' "export NODES=1; $PWD/esg.regional.sh" - -#----------------------------------------------------------------------------- -# Regional GSL gravity wave drag. -#----------------------------------------------------------------------------- - -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J reg.gsl.gwd -W 0:08 -M 2400 \ - -w 'ended(esg.regional)' -extsched 'CRAYLINUX[]' "export NODES=1; $PWD/regional.gsl.gwd.sh" - -#----------------------------------------------------------------------------- -# Create summary log. 
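# (The summary step below follows the pattern used by every driver in this
#  series: it waits on the final chained test via '-w ended(...)', then greps
#  the '<<<' pass/fail marker lines from $LOG_FILE into $SUM_FILE.)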
-#----------------------------------------------------------------------------- - -bsub -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J summary -R "rusage[mem=100]" -W 0:01 -w 'ended(reg.gsl.gwd)' "grep -a '<<<' $LOG_FILE >> $SUM_FILE" - -exit diff --git a/reg_tests/grid_gen/driver.wcoss_dell_p3.sh b/reg_tests/grid_gen/driver.wcoss_dell_p3.sh deleted file mode 100755 index 9d6bb3eba..000000000 --- a/reg_tests/grid_gen/driver.wcoss_dell_p3.sh +++ /dev/null @@ -1,100 +0,0 @@ -#!/bin/bash - -#----------------------------------------------------------------------------- -# -# Run grid generation consistency tests on WCOSS-Dell. -# -# Set WORK_DIR to your working directory. Set the PROJECT_CODE and QUEUE -# as appropriate. -# -# Invoke the script with no arguments. A series of daily- -# chained jobs will be submitted. To check the queue, type: "bjobs". -# -# Log output from the suite will be in LOG_FILE. Once the suite -# has completed, a summary is placed in SUM_FILE. -# -# A test fails when its output does not match the baseline files as -# determined by the "nccmp" utility. The baseline files are stored in -# HOMEreg -# -#----------------------------------------------------------------------------- - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module list - -set -x - -QUEUE="${QUEUE:-debug}" -PROJECT_CODE="${PROJECT_CODE:-GFS-DEV}" -export WORK_DIR="${WORK_DIR:-/gpfs/dell1/stmp/$LOGNAME}" -export WORK_DIR="${WORK_DIR}/reg-tests/grid-gen" - -#----------------------------------------------------------------------------- -# Should not have to change anything below here. -#----------------------------------------------------------------------------- - -export UPDATE_BASELINE="FALSE" -#export UPDATE_BASELINE="TRUE" - -if [ "$UPDATE_BASELINE" = "TRUE" ]; then - source ../get_hash.sh -fi - -LOG_FILE=consistency.log -SUM_FILE=summary.log -export home_dir=$PWD/../.. -export APRUN=time -export APRUN_SFC="mpirun -l" -export OMP_STACKSIZE=2048m -export machine=WCOSS_DELL_P3 -export HOMEreg=/gpfs/dell2/emc/modeling/noscrub/George.Gayno/ufs_utils.git/reg_tests/grid_gen/baseline_data -export OMP_NUM_THREADS=24 - -rm -fr $WORK_DIR - -ulimit -a -ulimit -s unlimited - -#----------------------------------------------------------------------------- -# C96 uniform grid -#----------------------------------------------------------------------------- - -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J c96.uniform -W 0:15 -x -n 24 \ - -R "span[ptile=24]" -R "affinity[core(1):distribute=balance]" "$PWD/c96.uniform.sh" - -#----------------------------------------------------------------------------- -# C96 uniform grid using viirs vegetation type data. 
-#----------------------------------------------------------------------------- - -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J c96.viirs.vegt -W 0:15 -x -n 24 -w 'ended(c96.uniform)' \ - -R "span[ptile=24]" -R "affinity[core(1):distribute=balance]" "$PWD/c96.viirs.vegt.sh" - -#----------------------------------------------------------------------------- -# GFDL regional grid -#----------------------------------------------------------------------------- - -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J gfdl.regional -W 0:10 -x -n 24 -w 'ended(c96.viirs.vegt)' \ - -R "span[ptile=24]" -R "affinity[core(1):distribute=balance]" "$PWD/gfdl.regional.sh" - -#----------------------------------------------------------------------------- -# ESG regional grid -#----------------------------------------------------------------------------- - -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J esg.regional -W 0:10 -x -n 24 -w 'ended(gfdl.regional)' \ - -R "span[ptile=24]" -R "affinity[core(1):distribute=balance]" "$PWD/esg.regional.sh" - -#----------------------------------------------------------------------------- -# Regional GSL gravity wave drag. -#----------------------------------------------------------------------------- - -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J reg.gsl.gwd -W 0:08 -x -n 24 -w 'ended(esg.regional)' \ - -R "span[ptile=24]" -R "affinity[core(1):distribute=balance]" "$PWD/regional.gsl.gwd.sh" - -#----------------------------------------------------------------------------- -# Create summary log. -#----------------------------------------------------------------------------- - -bsub -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J summary -R "affinity[core(1)]" -R "rusage[mem=100]" -W 0:01 \ - -w 'ended(reg.gsl.gwd)' "grep -a '<<<' $LOG_FILE >> $SUM_FILE" From fca20dc4dc93e90608c8a067c560b4d67fbee596 Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 09:48:03 -0500 Subject: [PATCH 13/18] Remove wcoss1 consistency test scripts for ice_blend. Fixes #665 --- reg_tests/ice_blend/driver.wcoss_cray.sh | 63 -------------------- reg_tests/ice_blend/driver.wcoss_dell_p3.sh | 64 --------------------- 2 files changed, 127 deletions(-) delete mode 100755 reg_tests/ice_blend/driver.wcoss_cray.sh delete mode 100755 reg_tests/ice_blend/driver.wcoss_dell_p3.sh diff --git a/reg_tests/ice_blend/driver.wcoss_cray.sh b/reg_tests/ice_blend/driver.wcoss_cray.sh deleted file mode 100755 index 3b1250143..000000000 --- a/reg_tests/ice_blend/driver.wcoss_cray.sh +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/bash - -#----------------------------------------------------------------------------- -# -# Run ice_blend consistency test on WCOSS-Cray. -# -# Set $DATA to your working directory. Set the project code (BSUB -P) -# and queue (BSUB -q) as appropriate. -# -# Invoke the script as follows: cat $script | bsub -# -# Log output is placed in consistency.log. A summary is -# placed in summary.log. -# -# The test fails when its output does not match the baseline file -# as determined by the 'cmp' command. The baseline file is -# stored in HOMEreg. 
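# A minimal sketch of a cmp-style baseline check of this sort; the file name
# and messages are placeholders, and the real pass/fail logic lives in
# ice_blend.sh:
#
cmp $DATA/blended_ice.grb $HOMEreg/blended_ice.grb
if [ $? -ne 0 ]; then
  echo "<<< ICE BLEND TEST FAILED."
else
  echo "<<< ICE BLEND TEST PASSED."
fi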
-# -#----------------------------------------------------------------------------- - -#BSUB -W 0:02 -#BSUB -o consistency.log -#BSUB -e consistency.log -#BSUB -J iceb_regt -#BSUB -q debug -#BSUB -R "rusage[mem=2000]" -#BSUB -P GFS-DEV - -set -x - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module list - -export DATA="${WORK_DIR:-/gpfs/hps3/stmp/$LOGNAME}" -export DATA="${DATA}/reg-tests/ice-blend" - -#----------------------------------------------------------------------------- -# Should not have to change anything below. -#----------------------------------------------------------------------------- - -export UPDATE_BASELINE="FALSE" -#export UPDATE_BASELINE="TRUE" - -if [ "$UPDATE_BASELINE" = "TRUE" ]; then - source ../get_hash.sh -fi - -export WGRIB=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.5/exec/wgrib -export WGRIB2=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.5/exec/wgrib2 -export COPYGB2=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.5/exec/copygb2 -export COPYGB=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.5/exec/copygb -export CNVGRIB=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.5/exec/cnvgrib - -export HOMEreg=/gpfs/hps3/emc/global/noscrub/George.Gayno/ufs_utils.git/reg_tests/ice_blend -export HOMEgfs=$PWD/../.. - -rm -fr $DATA - -./ice_blend.sh - -exit 0 diff --git a/reg_tests/ice_blend/driver.wcoss_dell_p3.sh b/reg_tests/ice_blend/driver.wcoss_dell_p3.sh deleted file mode 100755 index be2940dea..000000000 --- a/reg_tests/ice_blend/driver.wcoss_dell_p3.sh +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash - -#----------------------------------------------------------------------------- -# -# Run ice_blend consistency test on WCOSS-Dell. -# -# Set $DATA to your working directory. Set the project code (BSUB -P) -# and queue (BSUB -q) as appropriate. -# -# Invoke the script as follows: cat $script | bsub -# -# Log output is placed in consistency.log. A summary is -# placed in summary.log -# -# The test fails when its output does not match the baseline file -# as determined by the 'cmp' command. The baseline file is -# stored in HOMEreg. -# -#----------------------------------------------------------------------------- - -#BSUB -W 0:02 -#BSUB -o consistency.log -#BSUB -e consistency.log -#BSUB -J iceb_regt -#BSUB -q debug -#BSUB -R "affinity[core(1)]" -#BSUB -P GFS-DEV - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module load git -module list - -set -x - -export DATA="${WORK_DIR:-/gpfs/dell1/stmp/$LOGNAME}" -export DATA="${DATA}/reg-tests/ice-blend" - -#----------------------------------------------------------------------------- -# Should not have to change anything below. -#----------------------------------------------------------------------------- - -export UPDATE_BASELINE="FALSE" -#export UPDATE_BASELINE="TRUE" - -if [ "$UPDATE_BASELINE" = "TRUE" ]; then - source ../get_hash.sh -fi - -export WGRIB="/gpfs/dell1/nco/ops/nwprod/grib_util.v1.1.1/exec/wgrib" -export WGRIB2="/gpfs/dell1/nco/ops/nwprod/grib_util.v1.1.1/exec/wgrib2" -export COPYGB2="/gpfs/dell1/nco/ops/nwprod/grib_util.v1.1.1/exec/copygb2" -export COPYGB="/gpfs/dell1/nco/ops/nwprod/grib_util.v1.1.1/exec/copygb" -export CNVGRIB="/gpfs/dell1/nco/ops/nwprod/grib_util.v1.1.1/exec/cnvgrib" - -export HOMEreg=/gpfs/dell2/emc/modeling/noscrub/George.Gayno/ufs_utils.git/reg_tests/ice_blend -export HOMEgfs=$PWD/../.. 
- -rm -fr $DATA - -./ice_blend.sh - -exit 0 From 40761eabec5c1a710591be461960181ee48d9302 Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 09:51:06 -0500 Subject: [PATCH 14/18] Remove wcoss1 consistency tests for snow2mdl. Fixes #665 --- reg_tests/snow2mdl/driver.wcoss_cray.sh | 72 ---------------------- reg_tests/snow2mdl/driver.wcoss_dell_p3.sh | 67 -------------------- 2 files changed, 139 deletions(-) delete mode 100755 reg_tests/snow2mdl/driver.wcoss_cray.sh delete mode 100755 reg_tests/snow2mdl/driver.wcoss_dell_p3.sh diff --git a/reg_tests/snow2mdl/driver.wcoss_cray.sh b/reg_tests/snow2mdl/driver.wcoss_cray.sh deleted file mode 100755 index 2b92d4fa5..000000000 --- a/reg_tests/snow2mdl/driver.wcoss_cray.sh +++ /dev/null @@ -1,72 +0,0 @@ -#!/bin/bash - -#----------------------------------------------------------------------------- -# -# Run snow2mdl consistency tests on WCOSS-Cray. -# -# Set $DATA_ROOT to your working directory. Set the project code -# and queue as appropriate. -# -# Invoke the script as follows: ./$script -# -# Log output is placed in consistency.log. A summary is -# placed in summary.log -# -# The test fails when its output does not match the baseline file -# as determined by the 'cmp' command. The baseline files are -# stored in HOMEreg. -# -#----------------------------------------------------------------------------- - -set -x - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module list - -DATA_ROOT="${WORK_DIR:-/gpfs/hps3/stmp/$LOGNAME}" -DATA_ROOT="${DATA_ROOT}/reg-tests/snow2mdl" - -#----------------------------------------------------------------------------- -# Should not have to change anything below. -#----------------------------------------------------------------------------- - -export UPDATE_BASELINE="FALSE" -#export UPDATE_BASELINE="TRUE" - -if [ "$UPDATE_BASELINE" = "TRUE" ]; then - source ../get_hash.sh -fi - -PROJECT_CODE=${PROJECT_CODE:-GFS-DEV} -QUEUE=${QUEUE:-dev} - -export HOMEreg=/gpfs/hps3/emc/global/noscrub/George.Gayno/ufs_utils.git/reg_tests/snow2mdl -export HOMEgfs=$PWD/../.. -export WGRIB=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.2/exec/wgrib -export WGRIB2=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.2/exec/wgrib2 - -rm -fr $DATA_ROOT - -LOG_FILE="consistency.log" -SUM_FILE="summary.log" - -# Test the ops function of snow2mdl. - -export DATA=$DATA_ROOT/test.ops -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J snow.ops -W 0:02 \ - -R "rusage[mem=2000]" "$PWD/snow2mdl.ops.sh" - -# Test the afwa global snow data. - -export DATA=$DATA_ROOT/test.global -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J snow.global -W 0:02 \ - -R "rusage[mem=2000]" -w 'ended(snow.ops)' "$PWD/snow2mdl.global.sh" - -# Create a summary file. - -bsub -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J summary -R "rusage[mem=100]" -W 0:01 \ - -w 'ended(snow.global)' "grep -a '<<<' $LOG_FILE >> $SUM_FILE" - -exit 0 diff --git a/reg_tests/snow2mdl/driver.wcoss_dell_p3.sh b/reg_tests/snow2mdl/driver.wcoss_dell_p3.sh deleted file mode 100755 index 89fd5b4ec..000000000 --- a/reg_tests/snow2mdl/driver.wcoss_dell_p3.sh +++ /dev/null @@ -1,67 +0,0 @@ -#!/bin/bash - -#----------------------------------------------------------------------------- -# -# Run snow2mdl consistency tests on WCOSS-Dell. -# -# Set $DATA_ROOT to your working directory. Set the project code -# and queue as appropriate. 
-# -# Invoke the script as follows: ./$script -# -# Log output is placed in consistency.log. A summary is -# placed in summary.log -# -# The test fails when its output does not match the baseline file -# as determined by the 'cmp' command. The baseline files are -# stored in HOMEreg. -# -#----------------------------------------------------------------------------- - -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -module use ../../modulefiles -module load build.$target.intel -module load git -module list - -set -x - -export DATA_ROOT="${WORK_DIR:-/gpfs/dell1/stmp/$LOGNAME}" -export DATA_ROOT="${DATA_ROOT}/reg-tests/snow2mdl" - -PROJECT_CODE=${PROJECT_CODE:-"GFS-DEV"} -QUEUE=${QUEUE:-"debug"} - -#----------------------------------------------------------------------------- -# Should not have to change anything below. -#----------------------------------------------------------------------------- - -export UPDATE_BASELINE="FALSE" -#export UPDATE_BASELINE="TRUE" - -if [ "$UPDATE_BASELINE" = "TRUE" ]; then - source ../get_hash.sh -fi - -export HOMEreg=/gpfs/dell2/emc/modeling/noscrub/George.Gayno/ufs_utils.git/reg_tests/snow2mdl -export HOMEgfs=$PWD/../.. -export WGRIB=/gpfs/dell1/nco/ops/nwprod/grib_util.v1.0.6/exec/wgrib -export WGRIB2=/gpfs/dell1/nco/ops/nwprod/grib_util.v1.0.6/exec/wgrib2 - -LOG_FILE=consistency.log -SUM_FILE=summary.log - -rm -fr $DATA_ROOT - -export DATA=$DATA_ROOT/test.ops -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J snow.ops -W 0:02 \ - -R "affinity[core(1)]" "$PWD/snow2mdl.ops.sh" - -export DATA=$DATA_ROOT/test.global -bsub -e $LOG_FILE -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J snow.global -W 0:02 \ - -R "affinity[core(1)]" -w 'ended(snow.ops)' "$PWD/snow2mdl.global.sh" - -bsub -o $LOG_FILE -q $QUEUE -P $PROJECT_CODE -J summary -R "affinity[core(1)]" -R "rusage[mem=100]" -W 0:01 \ - -w 'ended(snow.global)' "grep -a '<<<' $LOG_FILE >> $SUM_FILE" - -exit 0 From 47c4322148b30a3979ea91e455d6dfb2b69696fe Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 10:04:36 -0500 Subject: [PATCH 15/18] Remove wcoss1 references from ./reg_tests/rt.sh Fixes #665. --- reg_tests/rt.sh | 32 +------------------------------- 1 file changed, 1 insertion(+), 31 deletions(-) diff --git a/reg_tests/rt.sh b/reg_tests/rt.sh index e533191b2..dfd5cf87e 100755 --- a/reg_tests/rt.sh +++ b/reg_tests/rt.sh @@ -38,29 +38,7 @@ echo "Started on " `hostname -s` >> ${WORK_DIR}/reg_test_results.txt ./build_all.sh -if [[ $target == "wcoss_dell_p3" ]] || [[ $target == "wcoss_cray" ]]; then - prod_machine=`cat /etc/prod` - prod_letter=${prod_machine:0:1} - - this_machine=`hostname -s` - this_letter=${this_machine:0:1} - - # Mars (m), Venus (v) - if [[ "${this_letter}" == "${prod_letter}" ]]; then - exit 0 - fi -fi - -# Set machine_id variable for running link_fixdirs -if [[ $target == "wcoss_dell_p3" ]]; then - machine_id=dell - module load lsf/10.1 -elif [[ $target == "wcoss_cray" ]]; then - machine_id=cray - module load xt-lsfhpc/9.1.3 -else - machine_id=$target -fi +machine_id=$target cd fix ./link_fixdirs.sh emc $machine_id @@ -107,18 +85,10 @@ for dir in snow2mdl global_cycle chgres_cube grid_gen; do cd .. 
done -if [[ $target == "wcoss_dell_p3" ]]; then - module load lsf/10.1 -elif [[ $target == "wcoss_cray" ]]; then - module load xt-lsfhpc/9.1.3 -fi - for dir in ice_blend; do cd $dir if [[ $target == "hera" ]] || [[ $target == "jet" ]] || [[ $target == "orion" ]] || [[ $target == "s4" ]] ; then sbatch -A ${PROJECT_CODE} ./driver.$target.sh - elif [[ $target == "wcoss_dell_p3" ]] || [[ $target == "wcoss_cray" ]]; then - cat ./driver.$target.sh | bsub -P ${PROJECT_CODE} fi # Wait for job to complete From 77856e6016d20034b9219cf4e9f66b392c38470e Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 10:29:55 -0500 Subject: [PATCH 16/18] Remove wcoss1 cray logic from grid_gen scripts. Remove some now obsolete variables. Fixes #665. --- driver_scripts/driver_grid.hera.sh | 1 - driver_scripts/driver_grid.jet.sh | 1 - driver_scripts/driver_grid.orion.sh | 1 - reg_tests/grid_gen/driver.hera.sh | 1 - reg_tests/grid_gen/driver.jet.sh | 1 - reg_tests/grid_gen/driver.orion.sh | 1 - ush/fv3gfs_driver_grid.sh | 119 ++++++++-------------------- ush/fv3gfs_make_lake.sh | 33 ++------ 8 files changed, 41 insertions(+), 117 deletions(-) diff --git a/driver_scripts/driver_grid.hera.sh b/driver_scripts/driver_grid.hera.sh index 4674e6e8e..82c9ad1c9 100755 --- a/driver_scripts/driver_grid.hera.sh +++ b/driver_scripts/driver_grid.hera.sh @@ -142,7 +142,6 @@ export APRUN=time export APRUN_SFC=srun export OMP_NUM_THREADS=24 export OMP_STACKSIZE=2048m -export machine=HERA ulimit -a ulimit -s unlimited diff --git a/driver_scripts/driver_grid.jet.sh b/driver_scripts/driver_grid.jet.sh index cf7d08475..df8f87b13 100755 --- a/driver_scripts/driver_grid.jet.sh +++ b/driver_scripts/driver_grid.jet.sh @@ -142,7 +142,6 @@ export APRUN=time export APRUN_SFC=srun export OMP_NUM_THREADS=24 export OMP_STACKSIZE=2048m -export machine=JET ulimit -a ulimit -s unlimited diff --git a/driver_scripts/driver_grid.orion.sh b/driver_scripts/driver_grid.orion.sh index 3350d21e3..dd5b58e99 100755 --- a/driver_scripts/driver_grid.orion.sh +++ b/driver_scripts/driver_grid.orion.sh @@ -143,7 +143,6 @@ export APRUN=time export APRUN_SFC=srun export OMP_NUM_THREADS=24 export OMP_STACKSIZE=2048m -export machine=ORION ulimit -a ulimit -s 199000000 diff --git a/reg_tests/grid_gen/driver.hera.sh b/reg_tests/grid_gen/driver.hera.sh index 9c5b9b97a..4efcfd8e2 100755 --- a/reg_tests/grid_gen/driver.hera.sh +++ b/reg_tests/grid_gen/driver.hera.sh @@ -52,7 +52,6 @@ export home_dir=$PWD/../.. export APRUN=time export APRUN_SFC=srun export OMP_STACKSIZE=2048m -export machine=HERA export HOMEreg=/scratch1/NCEPDEV/nems/role.ufsutils/ufs_utils/reg_tests/grid_gen/baseline_data ulimit -a diff --git a/reg_tests/grid_gen/driver.jet.sh b/reg_tests/grid_gen/driver.jet.sh index f6778a5b7..5ce0e1967 100755 --- a/reg_tests/grid_gen/driver.jet.sh +++ b/reg_tests/grid_gen/driver.jet.sh @@ -50,7 +50,6 @@ export home_dir=$PWD/../.. 
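# For reference, a rough sketch of how the same chain-then-summarize pattern is
# expressed with Slurm on the platforms that remain (hera/jet/orion/s4). Job
# names, script names, and options here are placeholders; the retained drivers
# may use a different option set:
#
TEST1=$(sbatch --parsable -A $PROJECT_CODE -q $QUEUE -J c96.uniform \
        -o $LOG_FILE -e $LOG_FILE ./c96.uniform.sh)
TEST2=$(sbatch --parsable -A $PROJECT_CODE -q $QUEUE -J gfdl.regional \
        -o $LOG_FILE -e $LOG_FILE --dependency=afterok:$TEST1 ./gfdl.regional.sh)
sbatch -A $PROJECT_CODE -q $QUEUE -J summary -o $LOG_FILE -e $LOG_FILE \
       --dependency=afterany:$TEST2 --wrap "grep -a '<<<' $LOG_FILE >> $SUM_FILE"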
export APRUN=time export APRUN_SFC=srun export OMP_STACKSIZE=2048m -export machine=JET export HOMEreg=/lfs4/HFIP/hfv3gfs/emc.nemspara/role.ufsutils/ufs_utils/reg_tests/grid_gen/baseline_data ulimit -a diff --git a/reg_tests/grid_gen/driver.orion.sh b/reg_tests/grid_gen/driver.orion.sh index 8b00621b8..03c5b3c2b 100755 --- a/reg_tests/grid_gen/driver.orion.sh +++ b/reg_tests/grid_gen/driver.orion.sh @@ -45,7 +45,6 @@ export APRUN=time export APRUN_SFC=srun export OMP_STACKSIZE=2048m export OMP_NUM_THREADS=24 -export machine=ORION export UPDATE_BASELINE="FALSE" #export UPDATE_BASELINE="TRUE" diff --git a/ush/fv3gfs_driver_grid.sh b/ush/fv3gfs_driver_grid.sh index e5c898e7e..00cdd201c 100755 --- a/ush/fv3gfs_driver_grid.sh +++ b/ush/fv3gfs_driver_grid.sh @@ -39,8 +39,6 @@ set -eux -export machine=${machine:?} - #---------------------------------------------------------------------------------- # Makes FV3 cubed-sphere grid #---------------------------------------------------------------------------------- @@ -171,67 +169,33 @@ if [ $gtype = uniform ] || [ $gtype = stretch ] || [ $gtype = nest ]; then echo "Begin uniform orography generation at `date`" -#---------------------------------------------------------------------------------- -# On WCOSS_C use cfp to run multiple tiles simulatneously for the orography -#---------------------------------------------------------------------------------- - - if [ $machine = WCOSS_C ]; then - touch $TEMP_DIR/orog.file1 - if [ $make_gsl_orog = true ]; then - touch $TEMP_DIR/orog_gsl.file1 - fi - tile=1 - while [ $tile -le $ntiles ]; do - echo "$script_dir/fv3gfs_make_orog.sh $res $tile $grid_dir $orog_dir $script_dir $topo " >>$TEMP_DIR/orog.file1 - if [ $make_gsl_orog = true ]; then - export halo_tmp="-999" # no halo - echo $script_dir/fv3gfs_make_orog_gsl.sh $res $tile $halo_tmp $grid_dir $orog_dir $topo_am >>$TEMP_DIR/orog_gsl.file1 - fi - tile=$(( $tile + 1 )) - done - aprun -j 1 -n 4 -N 4 -d 6 -cc depth cfp $TEMP_DIR/orog.file1 + tile=1 + while [ $tile -le $ntiles ]; do + set +x + echo + echo "............ Execute fv3gfs_make_orog.sh for tile $tile .................." + echo + set -x + $script_dir/fv3gfs_make_orog.sh $res $tile $grid_dir $orog_dir $script_dir $topo err=$? if [ $err != 0 ]; then exit $err fi - rm $TEMP_DIR/orog.file1 if [ $make_gsl_orog = true ]; then - aprun -j 1 -n 4 -N 4 -d 6 -cc depth cfp $TEMP_DIR/orog_gsl.file1 - err=$? - if [ $err != 0 ]; then - exit $err - fi - rm $TEMP_DIR/orog_gsl.file1 - fi - else - tile=1 - while [ $tile -le $ntiles ]; do set +x echo - echo "............ Execute fv3gfs_make_orog.sh for tile $tile .................." - echo + echo "............ Execute fv3gfs_make_orog_gsl.sh for tile $tile .................." + echo set -x - $script_dir/fv3gfs_make_orog.sh $res $tile $grid_dir $orog_dir $script_dir $topo + export halo_tmp="-999" # no halo + $script_dir/fv3gfs_make_orog_gsl.sh $res $tile $halo_tmp $grid_dir $orog_dir $topo_am err=$? if [ $err != 0 ]; then exit $err fi - if [ $make_gsl_orog = true ]; then - set +x - echo - echo "............ Execute fv3gfs_make_orog_gsl.sh for tile $tile .................." - echo - set -x - export halo_tmp="-999" # no halo - $script_dir/fv3gfs_make_orog_gsl.sh $res $tile $halo_tmp $grid_dir $orog_dir $topo_am - err=$? 
- if [ $err != 0 ]; then - exit $err - fi - fi - tile=$(( $tile + 1 )) - done - fi + fi + tile=$(( $tile + 1 )) + done if [ $add_lake = true ]; then $script_dir/fv3gfs_make_lake.sh @@ -385,29 +349,19 @@ elif [ $gtype = regional_gfdl ] || [ $gtype = regional_esg ]; then #---------------------------------------------------------------------------------- # Create orography. -# -# On WCOSS_C use cfp to run multiple tiles simulatneously for the orography. -# For now we only have one tile but in the future we will have more. #---------------------------------------------------------------------------------- echo "Begin orography generation at `date`" - if [ $machine = WCOSS_C ]; then - echo "$script_dir/fv3gfs_make_orog.sh $res $tile $grid_dir $orog_dir $script_dir $topo " >>$TEMP_DIR/orog.file1 - aprun -j 1 -n 4 -N 4 -d 6 -cc depth cfp $TEMP_DIR/orog.file1 - err=$? - rm $TEMP_DIR/orog.file1 - else - set +x - echo - echo "............ Execute fv3gfs_make_orog.sh for tile $tile .................." - echo - set -x - $script_dir/fv3gfs_make_orog.sh $res $tile $grid_dir $orog_dir $script_dir $topo - err=$? - if [ $err != 0 ]; then - exit $err - fi + set +x + echo + echo "............ Execute fv3gfs_make_orog.sh for tile $tile .................." + echo + set -x + $script_dir/fv3gfs_make_orog.sh $res $tile $grid_dir $orog_dir $script_dir $topo + err=$? + if [ $err != 0 ]; then + exit $err fi # add lake data to the orography file, if $add_lake is true @@ -498,22 +452,15 @@ elif [ $gtype = regional_gfdl ] || [ $gtype = regional_esg ]; then if [ $make_gsl_orog = true ]; then export halo_tmp="0" ln -sf $out_dir/C${res}_grid.tile${tile}.halo0.nc $grid_dir/ - if [ $machine = WCOSS_C ]; then - echo $script_dir/fv3gfs_make_orog_gsl.sh $res $tile $halo_tmp $grid_dir $orog_dir $topo_am >>$TEMP_DIR/orog_gsl.file1 - aprun -j 1 -n 4 -N 4 -d 6 -cc depth cfp $TEMP_DIR/orog_gsl.file1 - err=$? - rm $TEMP_DIR/orog_gsl.file1 - else - set +x - echo - echo "............ Execute fv3gfs_make_orog_gsl.sh for tile $tile .................." - echo - set -x - $script_dir/fv3gfs_make_orog_gsl.sh $res $tile $halo_tmp $grid_dir $orog_dir $topo_am - err=$? - if [ $err != 0 ]; then - exit $err - fi + set +x + echo + echo "............ Execute fv3gfs_make_orog_gsl.sh for tile $tile .................." + echo + set -x + $script_dir/fv3gfs_make_orog_gsl.sh $res $tile $halo_tmp $grid_dir $orog_dir $topo_am + err=$? + if [ $err != 0 ]; then + exit $err fi cp $orog_dir/C${res}_oro_data_*.tile${tile}*.nc $out_dir/ # gsl drag suite oro_data files fi diff --git a/ush/fv3gfs_make_lake.sh b/ush/fv3gfs_make_lake.sh index 05c5ab23e..cfb12aba6 100755 --- a/ush/fv3gfs_make_lake.sh +++ b/ush/fv3gfs_make_lake.sh @@ -83,35 +83,18 @@ fi # create lake data for FV3 grid and save it to the orography files -if [ $machine = WCOSS_C ]; then - touch ./lake.txt - tile=$tile_beg - while [ $tile -le $tile_end ]; do - echo "$exe_add_lake ${tile} ${res} ${indir} ${lake_cutoff}" >> ./lake.txt - tile=$(( $tile + 1 )) - done - aprun -j 1 -n 6 -N 6 -d 1 -cc depth cfp ./lake.txt +tile=$tile_beg +while [ $tile -le $tile_end ]; do + outfile=oro.C${res}.tile${tile}.nc + $APRUN $exe_add_lake ${tile} ${res} ${indir} ${lake_cutoff} err=$? if [ $err != 0 ]; then set +x - echo ERROR CREATING LAKE FRACTION + echo ERROR CREATING LAKE FRACTION FOR TILE $tile exit $err fi - rm ./lake.txt -else - tile=$tile_beg - while [ $tile -le $tile_end ]; do - outfile=oro.C${res}.tile${tile}.nc - $APRUN $exe_add_lake ${tile} ${res} ${indir} ${lake_cutoff} - err=$? 
- if [ $err != 0 ]; then - set +x - echo ERROR CREATING LAKE FRACTION FOR TILE $tile - exit $err - fi - echo "lake fraction is added to $outfile" - tile=$(( $tile + 1 )) - done -fi + echo "lake fraction is added to $outfile" + tile=$(( $tile + 1 )) +done exit 0 From a21169d9523070706aa3d912e327c280fe4ac9a2 Mon Sep 17 00:00:00 2001 From: George Gayno Date: Tue, 12 Jul 2022 10:40:59 -0500 Subject: [PATCH 17/18] Remove unused variable from ./ush/global_cycle_driver.sh. Fixes #665. --- ush/global_cycle_driver.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/ush/global_cycle_driver.sh b/ush/global_cycle_driver.sh index 8bf6be622..e5327970b 100755 --- a/ush/global_cycle_driver.sh +++ b/ush/global_cycle_driver.sh @@ -8,8 +8,6 @@ set -eux # Rahul Mahajan, 10/11/2017 #------------------------------------------------------------------------------------------------- -export machine=${machine:-"WCOSS_C"} - export CASE=${CASE:-C768} # resolution of tile: 48, 96, 192, 384, 768, 1152, 3072 export CDATE=${CDATE:-${cdate:-2017031900}} # format yyyymmddhh yyyymmddhh ... export CDUMP=${CDUMP:-gfs} # gfs or gdas From bffccd0ce7315e1a46102396d9b9baaf24371c20 Mon Sep 17 00:00:00 2001 From: "George.Gayno" Date: Mon, 18 Jul 2022 15:31:13 +0000 Subject: [PATCH 18/18] Minor script updates. Fixes #665. --- driver_scripts/driver_grid.wcoss2.sh | 3 +-- reg_tests/grid_gen/driver.wcoss2.sh | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/driver_scripts/driver_grid.wcoss2.sh b/driver_scripts/driver_grid.wcoss2.sh index 90eac38b3..b3c160f76 100755 --- a/driver_scripts/driver_grid.wcoss2.sh +++ b/driver_scripts/driver_grid.wcoss2.sh @@ -4,7 +4,7 @@ #PBS -e log #PBS -q debug #PBS -A GFS-DEV -#PBS -l walltime=00:05:00 +#PBS -l walltime=00:15:00 #PBS -N make_grid #PBS -l select=1:ncpus=24:mem=100GB @@ -145,7 +145,6 @@ export APRUN_SFC="mpiexec -n 24 -ppn 24 -cpu-bind core" export OMP_NUM_THREADS=24 # orog code worked best with 24 threads. export OMP_PLACES=cores export OMP_STACKSIZE=2048m -export machine=WCOSS2_CRAY ulimit -a ulimit -s unlimited diff --git a/reg_tests/grid_gen/driver.wcoss2.sh b/reg_tests/grid_gen/driver.wcoss2.sh index abf239c87..18f1abd22 100755 --- a/reg_tests/grid_gen/driver.wcoss2.sh +++ b/reg_tests/grid_gen/driver.wcoss2.sh @@ -56,7 +56,6 @@ export APRUN_SFC="mpiexec -n 30 -ppn 30 -cpu-bind core" export OMP_STACKSIZE=2048m export OMP_NUM_THREADS=30 # orog code uses threads export OMP_PLACES=cores -export machine=WCOSS2 export HOMEreg=/lfs/h2/emc/global/noscrub/George.Gayno/ufs_utils.git/reg_tests/grid_gen/baseline_data this_dir=$PWD