diff --git a/gcm_setup b/gcm_setup
index f8b0e42c..cfd6de0f 100755
--- a/gcm_setup
+++ b/gcm_setup
@@ -37,20 +37,17 @@ setenv ARCH `uname -s`
 setenv NODE `uname -n`
 
 if ($ARCH == Darwin) then
-   set FINDPATH = realpath
    set PRELOAD_COMMAND = 'DYLD_INSERT_LIBRARIES'
    set LD_LIBRARY_PATH_CMD = 'DYLD_LIBRARY_PATH'
    # On macOS we seem to need to call mpirun directly and not use esma_mpirun
    # For some reason SIP does not let the libraries be preloaded
    set RUN_CMD = 'mpirun -np '
 else
-   set FINDPATH = 'readlink -f'
    set PRELOAD_COMMAND = 'LD_PRELOAD'
    set LD_LIBRARY_PATH_CMD = 'LD_LIBRARY_PATH'
    set RUN_CMD = '$GEOSBIN/esma_mpirun -np '
 endif
-set GCMSETUP = `$FINDPATH $0`
-set BINDIR = `dirname $GCMSETUP`
+set BINDIR = `pwd -L`
 set GEOSDEF = `dirname $BINDIR`
 
 set ETCDIR = ${GEOSDEF}/etc
@@ -1815,7 +1812,7 @@ else if( $SITE == 'AWS' | $SITE == 'Azure' ) then
 
    setenv SSTDIR ${BOUNDARY_DIR}/@SSTNAME/${OGCM_IM}x${OGCM_JM} # location of SST Boundary Conditions
    setenv CHMDIR ${BOUNDARY_DIR}/fvInput_nc3 # locations of Aerosol Chemistry BCs
-   setenv WRKDIR /home/$LOGNAME # user work directory
+   setenv WRKDIR $HOME # user work directory
    setenv COUPLEDIR ${BOUNDARY_DIR}/bcs_shared/make_bcs_inputs/ocean # Coupled Ocean/Atmos Forcing
    setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
 
@@ -1853,7 +1850,7 @@ else
 
    setenv SSTDIR ${BOUNDARY_DIR}/@SSTNAME/${OGCM_IM}x${OGCM_JM} # location of SST Boundary Conditions
    setenv CHMDIR ${BOUNDARY_DIR}/fvInput_nc3 # locations of Aerosol Chemistry BCs
-   setenv WRKDIR /home/$LOGNAME # user work directory
+   setenv WRKDIR $HOME # user work directory
    setenv COUPLEDIR ${BOUNDARY_DIR}/bcs_shared/make_bcs_inputs/ocean # Coupled Ocean/Atmos Forcing
    setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
    set NX = 1
@@ -2188,9 +2185,6 @@ endif
 else if( $MPI_STACK == intelmpi ) then
 
 cat > $HOMDIR/SETENV.commands << EOF
-setenv I_MPI_ADJUST_ALLREDUCE 12
-setenv I_MPI_ADJUST_GATHERV 3
-
 # This flag prints out the Intel MPI state. Uncomment if needed
 #setenv I_MPI_DEBUG 9
 EOF
@@ -2200,7 +2194,24 @@ EOF
 # specific compared to the above adjustments
 
 if ( $SITE == 'NCCS' ) then
+# Some flags we know work on SLES15 and Milan (see below)
+# For safety, we keep the old SLES12 flags for that system
+#
+# NOTE: When Cascade Lake is on SLES15, the following flags
+# might need to be Milan only
+
+if ("$BUILT_ON_SLES15" == "TRUE") then
+cat >> $HOMDIR/SETENV.commands << EOF
+setenv I_MPI_FABRICS shm:ofi
+setenv I_MPI_OFI_PROVIDER psm3
+EOF
+
+else
+
 cat >> $HOMDIR/SETENV.commands << EOF
+setenv I_MPI_ADJUST_ALLREDUCE 12
+setenv I_MPI_ADJUST_GATHERV 3
+
 setenv I_MPI_SHM_HEAP_VSIZE 512
 setenv PSM2_MEMORY large
 EOF
@@ -2216,34 +2227,6 @@ EOF
 
 endif # if NOT Singularity
 
-# Testing by Bill Putman found these to be
-# useful flags with Intel MPI on SLES15 on the
-# Milan nodes.
-# Note 1: Testing by NCCS shows the PSM3 provider
-# runs on the Infiniband fabric. Tests show it runs
-# up to C720.
-# Note 2: When the Cascade Lakes are moved to
-# SLES15, these will need to be Milan-only flags
-# as Intel MPI will probably work just fine with
-# Intel chips.
-if ("$BUILT_ON_SLES15" == "TRUE") then
-cat >> $HOMDIR/SETENV.commands << EOF
-setenv I_MPI_FALLBACK 0
-setenv I_MPI_FABRICS ofi
-setenv I_MPI_OFI_PROVIDER psm3
-setenv I_MPI_ADJUST_SCATTER 2
-setenv I_MPI_ADJUST_SCATTERV 2
-setenv I_MPI_ADJUST_GATHER 2
-setenv I_MPI_ADJUST_GATHERV 3
-setenv I_MPI_ADJUST_ALLGATHER 3
-setenv I_MPI_ADJUST_ALLGATHERV 3
-setenv I_MPI_ADJUST_ALLREDUCE 12
-setenv I_MPI_ADJUST_REDUCE 10
-setenv I_MPI_ADJUST_BCAST 11
-setenv I_MPI_ADJUST_REDUCE_SCATTER 4
-setenv I_MPI_ADJUST_BARRIER 9
-EOF
-
 endif # if SLES15
 
 endif # if NCCS
diff --git a/geoschemchem_setup b/geoschemchem_setup
index dae9ef5a..213a32e6 100755
--- a/geoschemchem_setup
+++ b/geoschemchem_setup
@@ -37,20 +37,17 @@ setenv ARCH `uname -s`
 setenv NODE `uname -n`
 
 if ($ARCH == Darwin) then
-   set FINDPATH = realpath
    set PRELOAD_COMMAND = 'DYLD_INSERT_LIBRARIES'
    set LD_LIBRARY_PATH_CMD = 'DYLD_LIBRARY_PATH'
    # On macOS we seem to need to call mpirun directly and not use esma_mpirun
    # For some reason SIP does not let the libraries be preloaded
    set RUN_CMD = 'mpirun -np '
 else
-   set FINDPATH = 'readlink -f'
    set PRELOAD_COMMAND = 'LD_PRELOAD'
    set LD_LIBRARY_PATH_CMD = 'LD_LIBRARY_PATH'
    set RUN_CMD = '$GEOSBIN/esma_mpirun -np '
 endif
-set GCMSETUP = `$FINDPATH $0`
-set BINDIR = `dirname $GCMSETUP`
+set BINDIR = `pwd -L`
 set GEOSDEF = `dirname $BINDIR`
 
 set ETCDIR = ${GEOSDEF}/etc
@@ -1763,7 +1760,7 @@ if( $SITE == 'NAS' ) then
    setenv CHMDIR ${BOUNDARY_DIR}/fvInput_nc3 # locations of Aerosol Chemistry BCs
    setenv WRKDIR /nobackup/$LOGNAME # user work directory
    setenv COUPLEDIR ${BOUNDARY_DIR}/bcs_shared/make_bcs_inputs/ocean # Coupled Ocean/Atmos Forcing
-   setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
+   setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
 
 else if( $SITE == 'NCCS' ) then
    setenv BATCH_CMD "sbatch" # SLURM Batch command
@@ -1839,13 +1836,13 @@ else if( $SITE == 'AWS' | $SITE == 'Azure' ) then
    setenv BOUNDARY_DIR /ford1/share/gmao_SIteam/ModelData
    setenv BC_BASE ${BOUNDARY_DIR}/bcs_shared/fvInput/ExtData/esm/tiles
    setenv BCSDIR ${BC_BASE}/${LSM_BCS} # location of Boundary Conditions
-   setenv REPLAY_ANA_EXPID REPLAY_UNSUPPORTED # Default Analysis Experiment for REPLAY
+   setenv REPLAY_ANA_EXPID REPLAY_UNSUPPORTED # Default Analysis Experiment for REPLAY
    setenv REPLAY_ANA_LOCATION REPLAY_UNSUPPORTED # Default Analysis Location for REPLAY
    setenv M2_REPLAY_ANA_LOCATION REPLAY_UNSUPPORTED # Default Analysis Location for M2 REPLAY
 
    setenv SSTDIR ${BOUNDARY_DIR}/@SSTNAME/${OGCM_IM}x${OGCM_JM} # location of SST Boundary Conditions
    setenv CHMDIR ${BOUNDARY_DIR}/fvInput_nc3 # locations of Aerosol Chemistry BCs
-   setenv WRKDIR /home/$LOGNAME # user work directory
+   setenv WRKDIR $HOME # user work directory
    setenv COUPLEDIR ${BOUNDARY_DIR}/bcs_shared/make_bcs_inputs/ocean # Coupled Ocean/Atmos Forcing
    setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
 
@@ -1883,7 +1880,7 @@ else
 
    setenv SSTDIR ${BOUNDARY_DIR}/@SSTNAME/${OGCM_IM}x${OGCM_JM} # location of SST Boundary Conditions
    setenv CHMDIR ${BOUNDARY_DIR}/fvInput_nc3 # locations of Aerosol Chemistry BCs
-   setenv WRKDIR /home/$LOGNAME # user work directory
+   setenv WRKDIR $HOME # user work directory
    setenv COUPLEDIR ${BOUNDARY_DIR}/bcs_shared/make_bcs_inputs/ocean # Coupled Ocean/Atmos Forcing
    setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
    set NX = 1
@@ -2218,9 +2215,6 @@ endif
 else if( $MPI_STACK == intelmpi ) then
 
 cat > $HOMDIR/SETENV.commands << EOF
-setenv I_MPI_ADJUST_ALLREDUCE 12
-setenv I_MPI_ADJUST_GATHERV 3
-
 # This flag prints out the Intel MPI state. Uncomment if needed
 #setenv I_MPI_DEBUG 9
 EOF
@@ -2230,7 +2224,24 @@ EOF
 # specific compared to the above adjustments
 
 if ( $SITE == 'NCCS' ) then
+# Some flags we know work on SLES15 and Milan (see below)
+# For safety, we keep the old SLES12 flags for that system
+#
+# NOTE: When Cascade Lake is on SLES15, the following flags
+# might need to be Milan only
+
+if ("$BUILT_ON_SLES15" == "TRUE") then
 cat >> $HOMDIR/SETENV.commands << EOF
+setenv I_MPI_FABRICS shm:ofi
+setenv I_MPI_OFI_PROVIDER psm3
+EOF
+
+else
+
+cat >> $HOMDIR/SETENV.commands << EOF
+setenv I_MPI_ADJUST_ALLREDUCE 12
+setenv I_MPI_ADJUST_GATHERV 3
+
 setenv I_MPI_SHM_HEAP_VSIZE 512
 setenv PSM2_MEMORY large
 EOF
@@ -2246,34 +2257,6 @@ EOF
 
 endif # if NOT Singularity
 
-# Testing by Bill Putman found these to be
-# useful flags with Intel MPI on SLES15 on the
-# Milan nodes.
-# Note 1: Testing by NCCS shows the PSM3 provider
-# runs on the Infiniband fabric. Tests show it runs
-# up to C720.
-# Note 2: When the Cascade Lakes are moved to
-# SLES15, these will need to be Milan-only flags
-# as Intel MPI will probably work just fine with
-# Intel chips.
-if ("$BUILT_ON_SLES15" == "TRUE") then
-cat >> $HOMDIR/SETENV.commands << EOF
-setenv I_MPI_FALLBACK 0
-setenv I_MPI_FABRICS ofi
-setenv I_MPI_OFI_PROVIDER psm3
-setenv I_MPI_ADJUST_SCATTER 2
-setenv I_MPI_ADJUST_SCATTERV 2
-setenv I_MPI_ADJUST_GATHER 2
-setenv I_MPI_ADJUST_GATHERV 3
-setenv I_MPI_ADJUST_ALLGATHER 3
-setenv I_MPI_ADJUST_ALLGATHERV 3
-setenv I_MPI_ADJUST_ALLREDUCE 12
-setenv I_MPI_ADJUST_REDUCE 10
-setenv I_MPI_ADJUST_BCAST 11
-setenv I_MPI_ADJUST_REDUCE_SCATTER 4
-setenv I_MPI_ADJUST_BARRIER 9
-EOF
-
 endif # if SLES15
 
 endif # if NCCS
diff --git a/gmichem_setup b/gmichem_setup
index 117aa363..7d3098a1 100755
--- a/gmichem_setup
+++ b/gmichem_setup
@@ -37,20 +37,17 @@ setenv ARCH `uname -s`
 setenv NODE `uname -n`
 
 if ($ARCH == Darwin) then
-   set FINDPATH = realpath
    set PRELOAD_COMMAND = 'DYLD_INSERT_LIBRARIES'
    set LD_LIBRARY_PATH_CMD = 'DYLD_LIBRARY_PATH'
    # On macOS we seem to need to call mpirun directly and not use esma_mpirun
    # For some reason SIP does not let the libraries be preloaded
    set RUN_CMD = 'mpirun -np '
 else
-   set FINDPATH = 'readlink -f'
    set PRELOAD_COMMAND = 'LD_PRELOAD'
    set LD_LIBRARY_PATH_CMD = 'LD_LIBRARY_PATH'
    set RUN_CMD = '$GEOSBIN/esma_mpirun -np '
 endif
-set GCMSETUP = `$FINDPATH $0`
-set BINDIR = `dirname $GCMSETUP`
+set BINDIR = `pwd -L`
 set GEOSDEF = `dirname $BINDIR`
 
 set ETCDIR = ${GEOSDEF}/etc
@@ -1888,7 +1885,7 @@ if( $SITE == 'NAS' ) then
    setenv BATCH_TIME "PBS -l walltime=" # PBS Syntax for walltime
    setenv BATCH_JOBNAME "PBS -N " # PBS Syntax for job name
    setenv BATCH_OUTPUTNAME "PBS -o " # PBS Syntax for job output name
-   setenv BATCH_JOINOUTERR "PBS -j oe -k oed" # PBS Syntax for joining output and error
+   setenv BATCH_JOINOUTERR "PBS -j oe -k oed" # PBS Syntax for joining output and error
    setenv RUN_FT "6:00:00" # Wallclock Time for gcm_forecast.j
    setenv RUN_T "8:00:00" # Wallclock Time for gcm_run.j
    setenv POST_T "8:00:00" # Wallclock Time for gcm_post.j
@@ -1928,7 +1925,7 @@ if( $SITE == 'NAS' ) then
    setenv CHMDIR ${BOUNDARY_DIR}/fvInput_nc3 # locations of Aerosol Chemistry BCs
    setenv WRKDIR /nobackup/$LOGNAME # user work directory
    setenv COUPLEDIR ${BOUNDARY_DIR}/bcs_shared/make_bcs_inputs/ocean # Coupled Ocean/Atmos Forcing
-   setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
+   setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
 
 else if( $SITE == 'NCCS' ) then
    setenv BATCH_CMD "sbatch" # SLURM Batch command
@@ -2004,13 +2001,13 @@ else if( $SITE == 'AWS' | $SITE == 'Azure' ) then
    setenv BOUNDARY_DIR /ford1/share/gmao_SIteam/ModelData
    setenv BC_BASE ${BOUNDARY_DIR}/bcs_shared/fvInput/ExtData/esm/tiles
    setenv BCSDIR ${BC_BASE}/${LSM_BCS} # location of Boundary Conditions
-   setenv REPLAY_ANA_EXPID REPLAY_UNSUPPORTED # Default Analysis Experiment for REPLAY
+   setenv REPLAY_ANA_EXPID REPLAY_UNSUPPORTED # Default Analysis Experiment for REPLAY
    setenv REPLAY_ANA_LOCATION REPLAY_UNSUPPORTED # Default Analysis Location for REPLAY
    setenv M2_REPLAY_ANA_LOCATION REPLAY_UNSUPPORTED # Default Analysis Location for M2 REPLAY
 
    setenv SSTDIR ${BOUNDARY_DIR}/@SSTNAME/${OGCM_IM}x${OGCM_JM} # location of SST Boundary Conditions
    setenv CHMDIR ${BOUNDARY_DIR}/fvInput_nc3 # locations of Aerosol Chemistry BCs
-   setenv WRKDIR /home/$LOGNAME # user work directory
+   setenv WRKDIR $HOME # user work directory
    setenv COUPLEDIR ${BOUNDARY_DIR}/bcs_shared/make_bcs_inputs/ocean # Coupled Ocean/Atmos Forcing
    setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
 
@@ -2048,7 +2045,7 @@ else
 
    setenv SSTDIR ${BOUNDARY_DIR}/@SSTNAME/${OGCM_IM}x${OGCM_JM} # location of SST Boundary Conditions
    setenv CHMDIR ${BOUNDARY_DIR}/fvInput_nc3 # locations of Aerosol Chemistry BCs
-   setenv WRKDIR /home/$LOGNAME # user work directory
+   setenv WRKDIR $HOME # user work directory
    setenv COUPLEDIR ${BOUNDARY_DIR}/bcs_shared/make_bcs_inputs/ocean # Coupled Ocean/Atmos Forcing
    setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
    set NX = 1
@@ -2389,9 +2386,6 @@ endif
 else if( $MPI_STACK == intelmpi ) then
 
 cat > $HOMDIR/SETENV.commands << EOF
-setenv I_MPI_ADJUST_ALLREDUCE 12
-setenv I_MPI_ADJUST_GATHERV 3
-
 # This flag prints out the Intel MPI state. Uncomment if needed
 #setenv I_MPI_DEBUG 9
 EOF
@@ -2401,7 +2395,24 @@ EOF
 # specific compared to the above adjustments
 
 if ( $SITE == 'NCCS' ) then
+# Some flags we know work on SLES15 and Milan (see below)
+# For safety, we keep the old SLES12 flags for that system
+#
+# NOTE: When Cascade Lake is on SLES15, the following flags
+# might need to be Milan only
+
+if ("$BUILT_ON_SLES15" == "TRUE") then
 cat >> $HOMDIR/SETENV.commands << EOF
+setenv I_MPI_FABRICS shm:ofi
+setenv I_MPI_OFI_PROVIDER psm3
+EOF
+
+else
+
+cat >> $HOMDIR/SETENV.commands << EOF
+setenv I_MPI_ADJUST_ALLREDUCE 12
+setenv I_MPI_ADJUST_GATHERV 3
+
 setenv I_MPI_SHM_HEAP_VSIZE 512
 setenv PSM2_MEMORY large
 EOF
@@ -2417,34 +2428,6 @@ EOF
 
 endif # if NOT Singularity
 
-# Testing by Bill Putman found these to be
-# useful flags with Intel MPI on SLES15 on the
-# Milan nodes.
-# Note 1: Testing by NCCS shows the PSM3 provider
-# runs on the Infiniband fabric. Tests show it runs
-# up to C720.
-# Note 2: When the Cascade Lakes are moved to
-# SLES15, these will need to be Milan-only flags
-# as Intel MPI will probably work just fine with
-# Intel chips.
-if ("$BUILT_ON_SLES15" == "TRUE") then
-cat >> $HOMDIR/SETENV.commands << EOF
-setenv I_MPI_FALLBACK 0
-setenv I_MPI_FABRICS ofi
-setenv I_MPI_OFI_PROVIDER psm3
-setenv I_MPI_ADJUST_SCATTER 2
-setenv I_MPI_ADJUST_SCATTERV 2
-setenv I_MPI_ADJUST_GATHER 2
-setenv I_MPI_ADJUST_GATHERV 3
-setenv I_MPI_ADJUST_ALLGATHER 3
-setenv I_MPI_ADJUST_ALLGATHERV 3
-setenv I_MPI_ADJUST_ALLREDUCE 12
-setenv I_MPI_ADJUST_REDUCE 10
-setenv I_MPI_ADJUST_BCAST 11
-setenv I_MPI_ADJUST_REDUCE_SCATTER 4
-setenv I_MPI_ADJUST_BARRIER 9
-EOF
-
 endif # if SLES15
 
 endif # if NCCS
diff --git a/stratchem_setup b/stratchem_setup
index 085013f9..b76d11fa 100755
--- a/stratchem_setup
+++ b/stratchem_setup
@@ -31,26 +31,23 @@ endif
 # Build Directory Locations
 #######################################################################
 
-# Set Current Working Path to stratchem_setup
-# -------------------------------------------
+# Set Current Working Path to gcm_setup
+# -------------------------------------
 setenv ARCH `uname -s`
 setenv NODE `uname -n`
 
 if ($ARCH == Darwin) then
-   set FINDPATH = realpath
    set PRELOAD_COMMAND = 'DYLD_INSERT_LIBRARIES'
    set LD_LIBRARY_PATH_CMD = 'DYLD_LIBRARY_PATH'
    # On macOS we seem to need to call mpirun directly and not use esma_mpirun
    # For some reason SIP does not let the libraries be preloaded
    set RUN_CMD = 'mpirun -np '
 else
-   set FINDPATH = 'readlink -f'
    set PRELOAD_COMMAND = 'LD_PRELOAD'
    set LD_LIBRARY_PATH_CMD = 'LD_LIBRARY_PATH'
    set RUN_CMD = '$GEOSBIN/esma_mpirun -np '
 endif
-set GCMSETUP = `$FINDPATH $0`
-set BINDIR = `dirname $GCMSETUP`
+set BINDIR = `pwd -L`
 set GEOSDEF = `dirname $BINDIR`
 
 set ETCDIR = ${GEOSDEF}/etc
@@ -1748,7 +1745,7 @@ if( $SITE == 'NAS' ) then
    setenv CHMDIR ${BOUNDARY_DIR}/fvInput_nc3 # locations of Aerosol Chemistry BCs
    setenv WRKDIR /nobackup/$LOGNAME # user work directory
    setenv COUPLEDIR ${BOUNDARY_DIR}/bcs_shared/make_bcs_inputs/ocean # Coupled Ocean/Atmos Forcing
-   setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
+   setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
 
 else if( $SITE == 'NCCS' ) then
    setenv BATCH_CMD "sbatch" # SLURM Batch command
@@ -1824,13 +1821,13 @@ else if( $SITE == 'AWS' | $SITE == 'Azure' ) then
    setenv BOUNDARY_DIR /ford1/share/gmao_SIteam/ModelData
    setenv BC_BASE ${BOUNDARY_DIR}/bcs_shared/fvInput/ExtData/esm/tiles
    setenv BCSDIR ${BC_BASE}/${LSM_BCS} # location of Boundary Conditions
-   setenv REPLAY_ANA_EXPID REPLAY_UNSUPPORTED # Default Analysis Experiment for REPLAY
+   setenv REPLAY_ANA_EXPID REPLAY_UNSUPPORTED # Default Analysis Experiment for REPLAY
    setenv REPLAY_ANA_LOCATION REPLAY_UNSUPPORTED # Default Analysis Location for REPLAY
    setenv M2_REPLAY_ANA_LOCATION REPLAY_UNSUPPORTED # Default Analysis Location for M2 REPLAY
 
    setenv SSTDIR ${BOUNDARY_DIR}/@SSTNAME/${OGCM_IM}x${OGCM_JM} # location of SST Boundary Conditions
    setenv CHMDIR ${BOUNDARY_DIR}/fvInput_nc3 # locations of Aerosol Chemistry BCs
-   setenv WRKDIR /home/$LOGNAME # user work directory
+   setenv WRKDIR $HOME # user work directory
    setenv COUPLEDIR ${BOUNDARY_DIR}/bcs_shared/make_bcs_inputs/ocean # Coupled Ocean/Atmos Forcing
    setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
 
@@ -1868,7 +1865,7 @@ else
 
    setenv SSTDIR ${BOUNDARY_DIR}/@SSTNAME/${OGCM_IM}x${OGCM_JM} # location of SST Boundary Conditions
    setenv CHMDIR ${BOUNDARY_DIR}/fvInput_nc3 # locations of Aerosol Chemistry BCs
-   setenv WRKDIR /home/$LOGNAME # user work directory
+   setenv WRKDIR $HOME # user work directory
    setenv COUPLEDIR ${BOUNDARY_DIR}/bcs_shared/make_bcs_inputs/ocean # Coupled Ocean/Atmos Forcing
    setenv GWDRSDIR ${BOUNDARY_DIR}/GWD_RIDGE # Location of GWD_RIDGE files
    set NX = 1
@@ -2203,9 +2200,6 @@ endif
 else if( $MPI_STACK == intelmpi ) then
 
 cat > $HOMDIR/SETENV.commands << EOF
-setenv I_MPI_ADJUST_ALLREDUCE 12
-setenv I_MPI_ADJUST_GATHERV 3
-
 # This flag prints out the Intel MPI state. Uncomment if needed
 #setenv I_MPI_DEBUG 9
 EOF
@@ -2215,7 +2209,24 @@ EOF
 # specific compared to the above adjustments
 
 if ( $SITE == 'NCCS' ) then
+# Some flags we know work on SLES15 and Milan (see below)
+# For safety, we keep the old SLES12 flags for that system
+#
+# NOTE: When Cascade Lake is on SLES15, the following flags
+# might need to be Milan only
+
+if ("$BUILT_ON_SLES15" == "TRUE") then
+cat >> $HOMDIR/SETENV.commands << EOF
+setenv I_MPI_FABRICS shm:ofi
+setenv I_MPI_OFI_PROVIDER psm3
+EOF
+
+else
+
 cat >> $HOMDIR/SETENV.commands << EOF
+setenv I_MPI_ADJUST_ALLREDUCE 12
+setenv I_MPI_ADJUST_GATHERV 3
+
 setenv I_MPI_SHM_HEAP_VSIZE 512
 setenv PSM2_MEMORY large
 EOF
@@ -2231,34 +2242,6 @@ EOF
 
 endif # if NOT Singularity
 
-# Testing by Bill Putman found these to be
-# useful flags with Intel MPI on SLES15 on the
-# Milan nodes.
-# Note 1: Testing by NCCS shows the PSM3 provider
-# runs on the Infiniband fabric. Tests show it runs
-# up to C720.
-# Note 2: When the Cascade Lakes are moved to
-# SLES15, these will need to be Milan-only flags
-# as Intel MPI will probably work just fine with
-# Intel chips.
-if ("$BUILT_ON_SLES15" == "TRUE") then
-cat >> $HOMDIR/SETENV.commands << EOF
-setenv I_MPI_FALLBACK 0
-setenv I_MPI_FABRICS ofi
-setenv I_MPI_OFI_PROVIDER psm3
-setenv I_MPI_ADJUST_SCATTER 2
-setenv I_MPI_ADJUST_SCATTERV 2
-setenv I_MPI_ADJUST_GATHER 2
-setenv I_MPI_ADJUST_GATHERV 3
-setenv I_MPI_ADJUST_ALLGATHER 3
-setenv I_MPI_ADJUST_ALLGATHERV 3
-setenv I_MPI_ADJUST_ALLREDUCE 12
-setenv I_MPI_ADJUST_REDUCE 10
-setenv I_MPI_ADJUST_BCAST 11
-setenv I_MPI_ADJUST_REDUCE_SCATTER 4
-setenv I_MPI_ADJUST_BARRIER 9
-EOF
-
 endif # if SLES15
 
 endif # if NCCS