From f683469d5074f5488f4f3dc638971ed367fcaf99 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 18 Dec 2021 13:40:27 +0100 Subject: [PATCH 01/27] Switch conda deployment to spack for sys libs --- conda/bootstrap.py | 139 ++++++++++-------------- conda/compass_env/load_compass.template | 2 + conda/shared.py | 4 + 3 files changed, 66 insertions(+), 79 deletions(-) diff --git a/conda/bootstrap.py b/conda/bootstrap.py index 1af5e7ce5b..3b4b4383ae 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -16,7 +16,7 @@ from configparser import ConfigParser from mache import MachineInfo, discover_machine -from mache.spack import get_modules_env_vars_and_mpi_compilers +from mache.spack import make_spack_env, get_modules_env_vars_and_mpi_compilers from shared import parse_args, get_conda_base, check_call, install_miniconda @@ -130,12 +130,20 @@ def build_env(env_type, recreate, machine, compiler, mpi, conda_mpi, version, os.chdir(build_dir) if env_type == 'dev': - if env_name is None: - env_name = f'dev_compass_{version}{env_suffix}' + spack_env = f'dev_compass_{version}{env_suffix}' elif env_type == 'test_release': - env_name = f'test_compass_{version}{env_suffix}' + spack_env = f'test_compass_{version}{env_suffix}' else: - env_name = f'compass_{version}{env_suffix}' + spack_env = f'compass_{version}{env_suffix}' + + if env_name is None or env_type != 'dev': + env_name = spack_env + + # add the compiler and MPI library to the spack env name + spack_env = '{}_{}_{}'.format(spack_env, compiler, mpi) + # spack doesn't like dots + spack_env = spack_env.replace('.', '_') + env_path = os.path.join(conda_base, 'envs', env_name) if conda_mpi == 'nompi': @@ -218,7 +226,7 @@ def build_env(env_type, recreate, machine, compiler, mpi, conda_mpi, version, else: print(f'{env_name} already exists') - return env_path, env_name, activate_env + return env_path, env_name, activate_env, spack_env def get_sys_info(machine, compiler, mpilib, mpicc, mpicxx, mpifc, @@ -295,11 +303,8 @@ 
def get_sys_info(machine, compiler, mpilib, mpicc, mpicxx, mpifc, return sys_info, mod_env_commands -def build_system_libraries(config, machine, compiler, mpi, version, - template_path, env_path, env_name, activate_env, - mpicc, mpicxx, mpifc, mod_env_commands): - - mache_mod_env_commands = mod_env_commands +def build_spack_env(config, update_spack, machine, compiler, mpi, env_path, + env_name, activate_env, spack_env, mod_env_commands): if machine is not None: esmf = config.get('deploy', 'esmf') @@ -319,77 +324,42 @@ def build_system_libraries(config, machine, compiler, mpi, version, # it could be that esmf was already removed pass - force_build = False if machine is not None: - system_libs = config.get('deploy', 'system_libs') - compiler_path = os.path.join( - system_libs, 'compass_{}'.format(version), compiler, mpi) - scorpio_path = os.path.join(compiler_path, - 'scorpio_{}'.format(scorpio)) - esmf_path = os.path.join(compiler_path, 'esmf_{}'.format(esmf)) + spack_base = config.get('deploy', 'spack') + scorpio_path = os.path.join(spack_base, 'var/spack/environments/', + spack_env, '.spack-env/view') else: - # using conda-forge compilers - system_libs = None + spack_base = None scorpio_path = env_path - esmf_path = env_path - force_build = True - - sys_info, mod_env_commands = get_sys_info( - machine, compiler, mpi, mpicc, mpicxx, mpifc, mod_env_commands) - - if esmf != 'None': - bin_path = os.path.join(esmf_path, 'bin') - lib_path = os.path.join(esmf_path, 'lib') - mod_env_commands = \ - f'{mod_env_commands}\n' \ - f'export PATH="{bin_path}:$PATH"\n' \ - f'export LD_LIBRARY_PATH={lib_path}:$LD_LIBRARY_PATH' mod_env_commands = f'{mod_env_commands}\n' \ f'export PIO={scorpio_path}' - build_esmf = 'False' - if esmf == 'None': - esmf_branch = 'None' - else: - esmf_branch = 'ESMF_{}'.format(esmf.replace('.', '_')) - if not os.path.exists(esmf_path) or force_build: - build_esmf = 'True' - - build_scorpio = 'False' - if scorpio != 'None' and (not 
os.path.exists(scorpio_path) or force_build): - build_scorpio = 'True' + specs = list() - script_filename = 'build.bash' - - with open('{}/build.template'.format(template_path), 'r') as f: - template = Template(f.read()) + if esmf != 'None': + specs.append(f'esmf@{esmf}+mpi+netcdf~pio+pnetcdf') + if scorpio != 'None': + specs.append(f'scorpio@{scorpio}+pnetcdf~timing+internal-timing~tools+malloc') - if machine is None: - # need to activate the conda environment because that's where the - # libraries are - activate_commands = activate_env.replace("; ", "\n") - mache_mod_env_commands = f'{activate_commands}\n' \ - f'{mache_mod_env_commands}' - - script = template.render( - sys_info=sys_info, modules=mache_mod_env_commands, - scorpio=scorpio, scorpio_path=scorpio_path, - build_scorpio=build_scorpio, esmf_path=esmf_path, - esmf_branch=esmf_branch, build_esmf=build_esmf) - print('Writing {}'.format(script_filename)) - with open(script_filename, 'w') as handle: - handle.write(script) + if update_spack: + make_spack_env(spack_path=spack_base, env_name=spack_env, + spack_specs=specs, compiler=compiler, mpi=mpi, + machine=machine) - command = '/bin/bash build.bash' - check_call(command) + # remove ESMC/ESMF include files that interfere with MPAS time keeping + include_path = os.path.join(scorpio_path, 'include') + for prefix in ['ESMC', 'esmf']: + files = glob.glob(os.path.join(include_path, f'{prefix}*')) + for filename in files: + os.remove(filename) - return sys_info, system_libs, mod_env_commands + return spack_base, mod_env_commands def write_load_compass(template_path, activ_path, conda_base, env_type, - activ_suffix, prefix, env_name, machine, sys_info, - mod_env_commands, env_only): + activ_suffix, prefix, env_name, spack_base, spack_env, + machine, sys_info, mod_env_commands, env_only): try: os.makedirs(activ_path) @@ -417,6 +387,12 @@ def write_load_compass(template_path, activ_path, conda_base, env_type, with open(filename, 'r') as f: template = 
Template(f.read()) + if not env_only and spack_base is not None: + spack = f'source {spack_base}/share/spack/setup-env.sh\n' \ + f'spack env activate {spack_env}' + else: + spack = '' + if env_type == 'dev': update_compass = \ 'if [[ -f "./setup.py" && -d "compass" ]]; then\n' \ @@ -429,6 +405,7 @@ def write_load_compass(template_path, activ_path, conda_base, env_type, script = template.render(conda_base=conda_base, compass_env=env_name, mod_env_commands=mod_env_commands, + spack=spack, netcdf_paths=sys_info['mpas_netcdf_paths'], update_compass=update_compass) @@ -484,15 +461,15 @@ def test_command(command, env, package): print(' {} passes'.format(package)) -def update_permissions(config, env_type, activ_path, conda_base, system_libs): +def update_permissions(config, env_type, activ_path, conda_base, spack_base): directories = [] if env_type != 'dev': directories.append(conda_base) - if system_libs is not None: + if spack_base is not None: # even if this is not a release, we need to update permissions on # shared system libraries - directories.append(system_libs) + directories.append(spack_base) group = config.get('e3sm_unified', 'group') @@ -699,18 +676,21 @@ def main(): mpifc = 'mpifort' mod_env_commands = '' - env_path, env_name, activate_env = build_env( + env_path, env_name, activate_env, spack_env = build_env( env_type, recreate, machine, compiler, mpi, conda_mpi, version, python, source_path, template_path, conda_base, activ_suffix, args.env_name, env_suffix, activate_base, args.use_local, args.local_conda_build) if compiler is not None: - sys_info, system_libs, mod_env_commands = build_system_libraries( - config, machine, compiler, mpi, version, template_path, env_path, - env_name, activate_env, mpicc, mpicxx, mpifc, mod_env_commands) + sys_info, mod_env_commands = get_sys_info( + machine, compiler, mpi, mpicc, mpicxx, mpifc, mod_env_commands) + spack_base = build_spack_env( + config, args.update_spack, machine, compiler, mpi, env_path, + env_name, 
activate_env, spack_env, sys_info) else: - sys_info = dict(modules=[], env_vars=[], mpas_netcdf_paths='') - system_libs = None + sys_info = dict(mpas_netcdf_paths='') + spack_base = None + mod_env_commands = '' if env_type == 'dev': if args.env_name is not None: @@ -724,7 +704,8 @@ def main(): script_filename = write_load_compass( template_path, activ_path, conda_base, env_type, activ_suffix, prefix, - env_name, machine, sys_info, mod_env_commands, args.env_only) + env_name, spack_base, spack_env, machine, sys_info, mod_env_commands, + args.env_only) if args.check: check_env(script_filename, env_name) @@ -747,7 +728,7 @@ def main(): if machine is not None: update_permissions(config, env_type, activ_path, conda_base, - system_libs) + spack_base) if __name__ == '__main__': diff --git a/conda/compass_env/load_compass.template b/conda/compass_env/load_compass.template index 458b1fb8ca..765c033ec7 100644 --- a/conda/compass_env/load_compass.template +++ b/conda/compass_env/load_compass.template @@ -3,6 +3,8 @@ conda activate {{ compass_env }} {{ update_compass }} +{{ spack }} + {{ mod_env_commands }} {{ netcdf_paths }} diff --git a/conda/shared.py b/conda/shared.py index 541e653099..aee5f74f69 100644 --- a/conda/shared.py +++ b/conda/shared.py @@ -44,6 +44,10 @@ def parse_args(bootstrap): "packages") parser.add_argument("--use_local", dest="use_local", action='store_true', help="Use locally built conda packages (for testing).") + parser.add_argument("--update_spack", dest="update_spack", + action='store_true', + help="If the shared spack environment should be " + "created or recreated.") if bootstrap: parser.add_argument("--local_conda_build", dest="local_conda_build", type=str, From 38119d5e26ea1b5cfdd6a2476240e7eb2bb4943e Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sat, 18 Dec 2021 13:41:32 +0100 Subject: [PATCH 02/27] Update machine config files for spack --- compass/machines/anvil.cfg | 4 ++-- compass/machines/badger.cfg | 4 ++-- 
compass/machines/chrysalis.cfg | 4 ++-- compass/machines/compy.cfg | 4 ++-- compass/machines/cori-haswell.cfg | 4 ++-- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/compass/machines/anvil.cfg b/compass/machines/anvil.cfg index e234f50609..55ca5a38a4 100644 --- a/compass/machines/anvil.cfg +++ b/compass/machines/anvil.cfg @@ -29,5 +29,5 @@ mpi_intel = impi # the system MPI library to use for gnu compiler mpi_gnu = mvapich -# the base path to system libraries to be added as part of setting up compass -system_libs = /lcrc/soft/climate/compass/anvil/system +# the base path for spack environments used by compass +spack = /lcrc/soft/climate/compass/anvil/spack diff --git a/compass/machines/badger.cfg b/compass/machines/badger.cfg index 17b00001b0..acc6c0e69b 100644 --- a/compass/machines/badger.cfg +++ b/compass/machines/badger.cfg @@ -29,5 +29,5 @@ mpi_intel = impi # the system MPI library to use for gnu compiler mpi_gnu = mvapich -# the base path to system libraries to be added as part of setting up compass -system_libs = /usr/projects/climate/SHARED_CLIMATE/compass/badger/system +# the base path for spack environments used by compass +spack = /usr/projects/climate/SHARED_CLIMATE/compass/badger/spack diff --git a/compass/machines/chrysalis.cfg b/compass/machines/chrysalis.cfg index bc693c410a..f8280c8197 100644 --- a/compass/machines/chrysalis.cfg +++ b/compass/machines/chrysalis.cfg @@ -29,5 +29,5 @@ mpi_intel = impi # the system MPI library to use for gnu compiler mpi_gnu = openmpi -# the base path to system libraries to be added as part of setting up compass -system_libs = /lcrc/soft/climate/compass/chrysalis/system +# the base path for spack environments used by compass +spack = /lcrc/soft/climate/compass/chrysalis/spack diff --git a/compass/machines/compy.cfg b/compass/machines/compy.cfg index 7ecd2eb5e8..d09d28e617 100644 --- a/compass/machines/compy.cfg +++ b/compass/machines/compy.cfg @@ -29,5 +29,5 @@ mpi_intel = impi # the system MPI library 
to use for gnu compiler mpi_pgi = mvapich2 -# the base path to system libraries to be added as part of setting up compass -system_libs = /share/apps/E3SM/conda_envs/compass/system +# the base path for spack environments used by compass +spack = /share/apps/E3SM/conda_envs/compass/spack diff --git a/compass/machines/cori-haswell.cfg b/compass/machines/cori-haswell.cfg index f570bbf4ed..122637de24 100644 --- a/compass/machines/cori-haswell.cfg +++ b/compass/machines/cori-haswell.cfg @@ -29,8 +29,8 @@ mpi_intel = mpt # the system MPI library to use for gnu compiler mpi_gnu = mpt -# the base path to system libraries to be added as part of setting up compass -system_libs = /global/cfs/cdirs/e3sm/software/compass/cori-haswell/system +# the base path for spack environments used by compass +spack = /global/cfs/cdirs/e3sm/software/compass/cori-haswell/spack # the version of ESMF to build if using system compilers and MPI (don't build) esmf = None From f74733b3d06a80db396bd152d707464cd6790336 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Sun, 19 Dec 2021 14:20:19 +0100 Subject: [PATCH 03/27] Switch to using spack load script from mache --- conda/bootstrap.py | 28 +++++++------- conda/compass_env/build.template | 64 -------------------------------- 2 files changed, 14 insertions(+), 78 deletions(-) delete mode 100644 conda/compass_env/build.template diff --git a/conda/bootstrap.py b/conda/bootstrap.py index 3b4b4383ae..b27a48a88d 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -16,7 +16,8 @@ from configparser import ConfigParser from mache import MachineInfo, discover_machine -from mache.spack import make_spack_env, get_modules_env_vars_and_mpi_compilers +from mache.spack import make_spack_env, \ + get_modules_env_vars_and_mpi_compilers, get_spack_script from shared import parse_args, get_conda_base, check_call, install_miniconda @@ -354,12 +355,16 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_path, for filename in files: 
os.remove(filename) - return spack_base, mod_env_commands + spack_script = get_spack_script(spack_path=spack_base, env_name=spack_env, + compiler=compiler, mpi=mpi, shell='sh', + machine=machine) + + return spack_base, spack_script, mod_env_commands def write_load_compass(template_path, activ_path, conda_base, env_type, - activ_suffix, prefix, env_name, spack_base, spack_env, - machine, sys_info, mod_env_commands, env_only): + activ_suffix, prefix, env_name, spack_script, machine, + sys_info, mod_env_commands, env_only): try: os.makedirs(activ_path) @@ -387,12 +392,6 @@ def write_load_compass(template_path, activ_path, conda_base, env_type, with open(filename, 'r') as f: template = Template(f.read()) - if not env_only and spack_base is not None: - spack = f'source {spack_base}/share/spack/setup-env.sh\n' \ - f'spack env activate {spack_env}' - else: - spack = '' - if env_type == 'dev': update_compass = \ 'if [[ -f "./setup.py" && -d "compass" ]]; then\n' \ @@ -405,7 +404,7 @@ def write_load_compass(template_path, activ_path, conda_base, env_type, script = template.render(conda_base=conda_base, compass_env=env_name, mod_env_commands=mod_env_commands, - spack=spack, + spack=spack_script, netcdf_paths=sys_info['mpas_netcdf_paths'], update_compass=update_compass) @@ -684,13 +683,14 @@ def main(): if compiler is not None: sys_info, mod_env_commands = get_sys_info( machine, compiler, mpi, mpicc, mpicxx, mpifc, mod_env_commands) - spack_base = build_spack_env( + spack_base, spack_script, mod_env_commands = build_spack_env( config, args.update_spack, machine, compiler, mpi, env_path, - env_name, activate_env, spack_env, sys_info) + env_name, activate_env, spack_env, mod_env_commands) else: sys_info = dict(mpas_netcdf_paths='') spack_base = None mod_env_commands = '' + spack_script = '' if env_type == 'dev': if args.env_name is not None: @@ -704,7 +704,7 @@ def main(): script_filename = write_load_compass( template_path, activ_path, conda_base, env_type, activ_suffix, 
prefix, - env_name, spack_base, spack_env, machine, sys_info, mod_env_commands, + env_name, spack_script, machine, sys_info, mod_env_commands, args.env_only) if args.check: diff --git a/conda/compass_env/build.template b/conda/compass_env/build.template deleted file mode 100644 index a6594db372..0000000000 --- a/conda/compass_env/build.template +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash - -build_dir=$(pwd) - -{{ modules }} - -set -e - -{{ sys_info.netcdf_paths }} - -export SCORPIO_VERSION="{{ scorpio }}" -export SCORPIO_PATH="{{ scorpio_path }}" - -if [ "{{ build_scorpio }}" == "True" ]; then - rm -rf scorpio* - - git clone git@github.com:E3SM-Project/scorpio.git - cd scorpio - git checkout scorpio-v$SCORPIO_VERSION - - mkdir build - cd build - FC={{ sys_info.mpifc }} CC={{ sys_info.mpicc }} CXX={{ sys_info.mpicxx }} cmake \ - -DCMAKE_INSTALL_PREFIX=$SCORPIO_PATH -DPIO_ENABLE_TIMING=OFF \ - -DPIO_ENABLE_TOOLS=OFF \ - -DNetCDF_C_PATH=$NETCDF_C_PATH \ - -DNetCDF_Fortran_PATH=$NETCDF_FORTRAN_PATH \ - -DPnetCDF_PATH=$PNETCDF_PATH .. 
- - make - make install - - cd $build_dir -fi - -export ESMF_PATH="{{ esmf_path }}" -export ESMF_BRANCH="{{ esmf_branch }}" - -if [ "{{ build_esmf }}" == "True" ]; then - rm -rf esmf* - git clone git@github.com:esmf-org/esmf.git - cd esmf - git checkout "${ESMF_BRANCH}" - - export ESMF_DIR=$(pwd) - - export ESMF_INSTALL_PREFIX=${ESMF_PATH} - export ESMF_INSTALL_BINDIR=${ESMF_PATH}/bin - export ESMF_INSTALL_DOCDIR=${ESMF_PATH}/doc - export ESMF_INSTALL_HEADERDIR=${ESMF_PATH}/include - export ESMF_INSTALL_LIBDIR=${ESMF_PATH}/lib - export ESMF_INSTALL_MODDIR=${ESMF_PATH}/mod - -{{ sys_info.esmf_netcdf }} - export ESMF_PNETCDF="pnetcdf-config" - -{{ sys_info.esmf_compilers }} - export ESMF_COMM={{ sys_info.esmf_comm }} - - make -j 8 - make install - - cd $build_dir -fi From 5a7b3f8f2e585b816e8ec9f56ba4ee53744e0ed1 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 20 Dec 2021 06:41:02 -0800 Subject: [PATCH 04/27] Clean up system info Get rid of ESMF details Use path to `nc-config`, `nf-config` and `pnetcdf-config` to find NetCDF and pNetCDF paths on Cori (just like on other machines). 
--- conda/bootstrap.py | 51 ++++++---------------------------------------- 1 file changed, 6 insertions(+), 45 deletions(-) diff --git a/conda/bootstrap.py b/conda/bootstrap.py index b27a48a88d..bd4e199fe6 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -238,16 +238,6 @@ def get_sys_info(machine, compiler, mpilib, mpicc, mpicxx, mpifc, # convert env vars from mache to a list - if 'intel' in compiler: - esmf_compilers = ' export ESMF_COMPILER=intel' - elif compiler == 'pgi': - esmf_compilers = ' export ESMF_COMPILER=pgi\n' \ - ' export ESMF_F90={}\n' \ - ' export ESMF_CXX={}'.format(mpifc, mpicxx) - else: - esmf_compilers = ' export ESMF_F90={}\n' \ - ' export ESMF_CXX={}'.format(mpifc, mpicxx) - if 'intel' in compiler and machine == 'anvil': mod_env_commands = f'{mod_env_commands}\n' \ f'export I_MPI_CC=icc\n' \ @@ -260,45 +250,16 @@ def get_sys_info(machine, compiler, mpilib, mpicc, mpicxx, mpifc, f'export MPAS_EXTERNAL_LIBS="-lgomp"' if mpilib == 'mvapich': - esmf_comm = 'mvapich2' mod_env_commands = f'{mod_env_commands}\n' \ f'export MV2_ENABLE_AFFINITY=0' \ f'export MV2_SHOW_CPU_BINDING=1' - elif mpilib == 'mpich': - esmf_comm = 'mpich3' - elif mpilib == 'impi': - esmf_comm = 'intelmpi' - else: - esmf_comm = mpilib - if machine == 'grizzly' or machine == 'badger': - esmf_netcdf = \ - ' export ESMF_NETCDF="split"\n' \ - ' export ESMF_NETCDF_INCLUDE=$NETCDF_C_PATH/include\n' \ - ' export ESMF_NETCDF_LIBPATH=$NETCDF_C_PATH/lib64' - else: - esmf_netcdf = ' export ESMF_NETCDF="nc-config"' - - if 'cori' in machine: - netcdf_paths = 'export NETCDF_C_PATH=$NETCDF_DIR\n' \ - 'export NETCDF_FORTRAN_PATH=$NETCDF_DIR\n' \ - 'export PNETCDF_PATH=$PNETCDF_DIR' - mpas_netcdf_paths = 'export NETCDF=$NETCDF_DIR\n' \ - 'export NETCDFF=$NETCDF_DIR\n' \ - 'export PNETCDF=$PNETCDF_DIR' - else: - netcdf_paths = \ - 'export NETCDF_C_PATH=$(dirname $(dirname $(which nc-config)))\n' \ - 'export NETCDF_FORTRAN_PATH=$(dirname $(dirname $(which nf-config)))\n' \ - 'export 
PNETCDF_PATH=$(dirname $(dirname $(which pnetcdf-config)))' - mpas_netcdf_paths = \ - 'export NETCDF=$(dirname $(dirname $(which nc-config)))\n' \ - 'export NETCDFF=$(dirname $(dirname $(which nf-config)))\n' \ - 'export PNETCDF=$(dirname $(dirname $(which pnetcdf-config)))' - - sys_info = dict(mpicc=mpicc, mpicxx=mpicxx, - mpifc=mpifc, esmf_comm=esmf_comm, esmf_netcdf=esmf_netcdf, - esmf_compilers=esmf_compilers, netcdf_paths=netcdf_paths, + mpas_netcdf_paths = \ + 'export NETCDF=$(dirname $(dirname $(which nc-config)))\n' \ + 'export NETCDFF=$(dirname $(dirname $(which nf-config)))\n' \ + 'export PNETCDF=$(dirname $(dirname $(which pnetcdf-config)))' + + sys_info = dict(mpicc=mpicc, mpicxx=mpicxx, mpifc=mpifc, mpas_netcdf_paths=mpas_netcdf_paths) return sys_info, mod_env_commands From 0cea6817f1df3f6b7be91a85e9ec4f5008d9a7bd Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 28 Dec 2021 11:43:21 +0100 Subject: [PATCH 05/27] Clone spack for specific mache version --- conda/bootstrap.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/conda/bootstrap.py b/conda/bootstrap.py index bd4e199fe6..bea9a267e2 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -18,6 +18,7 @@ from mache import MachineInfo, discover_machine from mache.spack import make_spack_env, \ get_modules_env_vars_and_mpi_compilers, get_spack_script +from mache.version import __version__ as mache_version from shared import parse_args, get_conda_base, check_call, install_miniconda @@ -288,6 +289,7 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_path, if machine is not None: spack_base = config.get('deploy', 'spack') + spack_base = f'{spack_base}/spack_for_mache_{mache_version}' scorpio_path = os.path.join(spack_base, 'var/spack/environments/', spack_env, '.spack-env/view') else: From f3be6cf15d92ea44deb0dfca365fb0aed6de5cad Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 28 Dec 2021 12:24:21 +0100 Subject: [PATCH 06/27] Option to use non-E3SM hdf5, 
netcdf on each machine --- compass/machines/anvil.cfg | 4 ++ compass/machines/badger.cfg | 4 ++ compass/machines/chrysalis.cfg | 4 ++ compass/machines/compy.cfg | 6 +++ compass/machines/cori-haswell.cfg | 4 ++ compass/machines/cori-knl.cfg | 8 +++- compass/machines/grizzly.cfg | 8 +++- conda/bootstrap.py | 73 +++++++++++++++++-------------- conda/default.cfg | 8 ++-- 9 files changed, 79 insertions(+), 40 deletions(-) diff --git a/compass/machines/anvil.cfg b/compass/machines/anvil.cfg index 55ca5a38a4..ed4c4a338e 100644 --- a/compass/machines/anvil.cfg +++ b/compass/machines/anvil.cfg @@ -31,3 +31,7 @@ mpi_gnu = mvapich # the base path for spack environments used by compass spack = /lcrc/soft/climate/compass/anvil/spack + +# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and +# pnetcdf as E3SM (spack modules are used otherwise) +use_e3sm_hdf5_netcdf = True diff --git a/compass/machines/badger.cfg b/compass/machines/badger.cfg index acc6c0e69b..94b93e91c9 100644 --- a/compass/machines/badger.cfg +++ b/compass/machines/badger.cfg @@ -31,3 +31,7 @@ mpi_gnu = mvapich # the base path for spack environments used by compass spack = /usr/projects/climate/SHARED_CLIMATE/compass/badger/spack + +# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and +# pnetcdf as E3SM (spack modules are used otherwise) +use_e3sm_hdf5_netcdf = True diff --git a/compass/machines/chrysalis.cfg b/compass/machines/chrysalis.cfg index f8280c8197..76a1d4486d 100644 --- a/compass/machines/chrysalis.cfg +++ b/compass/machines/chrysalis.cfg @@ -31,3 +31,7 @@ mpi_gnu = openmpi # the base path for spack environments used by compass spack = /lcrc/soft/climate/compass/chrysalis/spack + +# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and +# pnetcdf as E3SM (spack modules are used otherwise) +use_e3sm_hdf5_netcdf = True diff --git a/compass/machines/compy.cfg b/compass/machines/compy.cfg index d09d28e617..f1ac3a10bb 100644 --- 
a/compass/machines/compy.cfg +++ b/compass/machines/compy.cfg @@ -31,3 +31,9 @@ mpi_pgi = mvapich2 # the base path for spack environments used by compass spack = /share/apps/E3SM/conda_envs/compass/spack + +# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and +# pnetcdf as E3SM (spack modules are used otherwise) +# +# We don't use them on Compy because hdf5 and netcdf were built without MPI +use_e3sm_hdf5_netcdf = False diff --git a/compass/machines/cori-haswell.cfg b/compass/machines/cori-haswell.cfg index 122637de24..447ac95fe2 100644 --- a/compass/machines/cori-haswell.cfg +++ b/compass/machines/cori-haswell.cfg @@ -32,5 +32,9 @@ mpi_gnu = mpt # the base path for spack environments used by compass spack = /global/cfs/cdirs/e3sm/software/compass/cori-haswell/spack +# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and +# pnetcdf as E3SM (spack modules are used otherwise) +use_e3sm_hdf5_netcdf = True + # the version of ESMF to build if using system compilers and MPI (don't build) esmf = None diff --git a/compass/machines/cori-knl.cfg b/compass/machines/cori-knl.cfg index 1c4ece4265..7f98f6eca6 100644 --- a/compass/machines/cori-knl.cfg +++ b/compass/machines/cori-knl.cfg @@ -29,8 +29,12 @@ mpi_intel = impi # the system MPI library to use for gnu compiler mpi_gnu = mpt -# the base path to system libraries to be added as part of setting up compass -system_libs = /global/cfs/cdirs/e3sm/software/compass/cori-knl/system +# the base path for spack environments used by compass +spack = /global/cfs/cdirs/e3sm/software/compass/cori-knl/spack + +# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and +# pnetcdf as E3SM (spack modules are used otherwise) +use_e3sm_hdf5_netcdf = True # the version of ESMF to build if using system compilers and MPI (don't build) esmf = None diff --git a/compass/machines/grizzly.cfg b/compass/machines/grizzly.cfg index 304e31a6f0..f7f36ccbed 100644 --- a/compass/machines/grizzly.cfg +++ 
b/compass/machines/grizzly.cfg @@ -29,5 +29,9 @@ mpi_intel = impi # the system MPI library to use for gnu compiler mpi_gnu = mvapich -# the base path to system libraries to be added as part of setting up compass -system_libs = /usr/projects/climate/SHARED_CLIMATE/compass/grizzly/system +# the base path for spack environments used by compass +spack = /usr/projects/climate/SHARED_CLIMATE/compass/grizzly/spack + +# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and +# pnetcdf as E3SM (spack modules are used otherwise) +use_e3sm_hdf5_netcdf = True diff --git a/conda/bootstrap.py b/conda/bootstrap.py index bea9a267e2..874b478804 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -266,16 +266,11 @@ def get_sys_info(machine, compiler, mpilib, mpicc, mpicxx, mpifc, return sys_info, mod_env_commands -def build_spack_env(config, update_spack, machine, compiler, mpi, env_path, - env_name, activate_env, spack_env, mod_env_commands): +def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, + activate_env, spack_env, mod_env_commands): - if machine is not None: - esmf = config.get('deploy', 'esmf') - scorpio = config.get('deploy', 'scorpio') - else: - # stick with the conda-forge ESMF and e3sm/label/compass SCORPIO - esmf = 'None' - scorpio = 'None' + esmf = config.get('deploy', 'esmf') + scorpio = config.get('deploy', 'scorpio') if esmf != 'None': # remove conda-forge esmf because we will use the system build @@ -287,20 +282,23 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_path, # it could be that esmf was already removed pass - if machine is not None: - spack_base = config.get('deploy', 'spack') - spack_base = f'{spack_base}/spack_for_mache_{mache_version}' - scorpio_path = os.path.join(spack_base, 'var/spack/environments/', - spack_env, '.spack-env/view') - else: - spack_base = None - scorpio_path = env_path - - mod_env_commands = f'{mod_env_commands}\n' \ - f'export PIO={scorpio_path}' + spack_base = 
config.get('deploy', 'spack') + spack_base = f'{spack_base}/spack_for_mache_{mache_version}' specs = list() + e3sm_hdf5_netcdf = config.getboolean('deploy', 'use_e3sm_hdf5_netcdf') + if not e3sm_hdf5_netcdf: + hdf5 = config.get('deploy', 'hdf5') + netcdf_c = config.get('deploy', 'netcdf_c') + netcdf_fortran = config.get('deploy', 'netcdf_fortran') + pnetcdf = config.get('deploy', 'pnetcdf') + specs.extend([ + f'hdf5@{hdf5}+cxx+fortran+hl+mpi+shared', + f'netcdf-c@{netcdf_c}+mpi~parallel-netcdf', + f'netcdf-fortran@{netcdf_fortran}', + f'parallel-netcdf@{pnetcdf}+cxx+fortran']) + if esmf != 'None': specs.append(f'esmf@{esmf}+mpi+netcdf~pio+pnetcdf') if scorpio != 'None': @@ -309,10 +307,12 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_path, if update_spack: make_spack_env(spack_path=spack_base, env_name=spack_env, spack_specs=specs, compiler=compiler, mpi=mpi, - machine=machine) + machine=machine, + include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf) # remove ESMC/ESMF include files that interfere with MPAS time keeping - include_path = os.path.join(scorpio_path, 'include') + include_path = os.path.join(spack_base, 'var/spack/environments/', + spack_env, '.spack-env/view/include') for prefix in ['ESMC', 'esmf']: files = glob.glob(os.path.join(include_path, f'{prefix}*')) for filename in files: @@ -320,7 +320,8 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_path, spack_script = get_spack_script(spack_path=spack_base, env_name=spack_env, compiler=compiler, mpi=mpi, shell='sh', - machine=machine) + machine=machine, + include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf) return spack_base, spack_script, mod_env_commands @@ -643,17 +644,23 @@ def main(): source_path, template_path, conda_base, activ_suffix, args.env_name, env_suffix, activate_base, args.use_local, args.local_conda_build) + spack_base = None + spack_script = '' + sys_info = dict(mpas_netcdf_paths='') if compiler is not None: - sys_info, mod_env_commands = get_sys_info( - 
machine, compiler, mpi, mpicc, mpicxx, mpifc, mod_env_commands) - spack_base, spack_script, mod_env_commands = build_spack_env( - config, args.update_spack, machine, compiler, mpi, env_path, - env_name, activate_env, spack_env, mod_env_commands) - else: - sys_info = dict(mpas_netcdf_paths='') - spack_base = None - mod_env_commands = '' - spack_script = '' + if machine is not None: + sys_info, mod_env_commands = get_sys_info( + machine, compiler, mpi, mpicc, mpicxx, mpifc, mod_env_commands) + spack_base, spack_script, mod_env_commands = build_spack_env( + config, args.update_spack, machine, compiler, mpi, env_name, + activate_env, spack_env, mod_env_commands) + scorpio_path = os.path.join(spack_base, 'var/spack/environments/', + spack_env, '.spack-env/view') + mod_env_commands = f'{mod_env_commands}\n' \ + f'export PIO={scorpio_path}' + else: + mod_env_commands = f'{mod_env_commands}\n' \ + f'export PIO={env_path}' if env_type == 'dev': if args.env_name is not None: diff --git a/conda/default.cfg b/conda/default.cfg index 57a781b168..6a2decd1c5 100644 --- a/conda/default.cfg +++ b/conda/default.cfg @@ -19,8 +19,10 @@ python = 3.9 # the MPI version (nompi, mpich or openmpi) mpi = nompi -# the version of ESMF to build if using system compilers and MPI +# the version of various packages to include if using spack esmf = 8.1.1 - -# the SCORPIO version to build +hdf5 = 1.12.1 +netcdf_c = 4.8.1 +netcdf_fortran = 4.5.3 +pnetcdf = 1.12.2 scorpio = 1.2.2 From 15f31f5663d12c337186943519dbdfebc9c86912 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 3 Mar 2022 11:40:31 +0100 Subject: [PATCH 07/27] Add albany develop to spack env --- conda/bootstrap.py | 4 ++++ conda/default.cfg | 1 + 2 files changed, 5 insertions(+) diff --git a/conda/bootstrap.py b/conda/bootstrap.py index 874b478804..5e7b6ff0b2 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -271,6 +271,7 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, esmf = 
config.get('deploy', 'esmf') scorpio = config.get('deploy', 'scorpio') + albany = config.get('deploy', 'albany') if esmf != 'None': # remove conda-forge esmf because we will use the system build @@ -304,6 +305,9 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, if scorpio != 'None': specs.append(f'scorpio@{scorpio}+pnetcdf~timing+internal-timing~tools+malloc') + if albany != 'None': + specs.append(f'albany@{albany}') + if update_spack: make_spack_env(spack_path=spack_base, env_name=spack_env, spack_specs=specs, compiler=compiler, mpi=mpi, diff --git a/conda/default.cfg b/conda/default.cfg index 6a2decd1c5..c2f45e681f 100644 --- a/conda/default.cfg +++ b/conda/default.cfg @@ -26,3 +26,4 @@ netcdf_c = 4.8.1 netcdf_fortran = 4.5.3 pnetcdf = 1.12.2 scorpio = 1.2.2 +albany = develop \ No newline at end of file From 84f7ec0797b425374d5c7078f4ab839b7532b699 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 4 Mar 2022 14:56:04 +0100 Subject: [PATCH 08/27] Update the way env vars are handled They should come from the spack script snippet and not from the E3SM machine config file anymore. 
--- conda/bootstrap.py | 103 ++++++++++-------------- conda/compass_env/load_compass.template | 4 +- 2 files changed, 43 insertions(+), 64 deletions(-) diff --git a/conda/bootstrap.py b/conda/bootstrap.py index 5e7b6ff0b2..e9936563c2 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -15,9 +15,8 @@ from importlib.resources import path from configparser import ConfigParser -from mache import MachineInfo, discover_machine -from mache.spack import make_spack_env, \ - get_modules_env_vars_and_mpi_compilers, get_spack_script +from mache import discover_machine +from mache.spack import make_spack_env, get_spack_script from mache.version import __version__ as mache_version from shared import parse_args, get_conda_base, check_call, install_miniconda @@ -231,43 +230,41 @@ def build_env(env_type, recreate, machine, compiler, mpi, conda_mpi, version, return env_path, env_name, activate_env, spack_env -def get_sys_info(machine, compiler, mpilib, mpicc, mpicxx, mpifc, - mod_env_commands): +def get_env_vars(machine, compiler, mpilib): if machine is None: machine = 'None' # convert env vars from mache to a list + env_vars = '' if 'intel' in compiler and machine == 'anvil': - mod_env_commands = f'{mod_env_commands}\n' \ - f'export I_MPI_CC=icc\n' \ - f'export I_MPI_CXX=icpc\n' \ - f'export I_MPI_F77=ifort\n' \ - f'export I_MPI_F90=ifort' + env_vars = f'{env_vars}' \ + f'export I_MPI_CC=icc\n' \ + f'export I_MPI_CXX=icpc\n' \ + f'export I_MPI_F77=ifort\n' \ + f'export I_MPI_F90=ifort\n' if platform.system() == 'Linux' and machine == 'None': - mod_env_commands = f'{mod_env_commands}\n' \ - f'export MPAS_EXTERNAL_LIBS="-lgomp"' + env_vars = f'{env_vars}' \ + f'export MPAS_EXTERNAL_LIBS="-lgomp"\n' if mpilib == 'mvapich': - mod_env_commands = f'{mod_env_commands}\n' \ - f'export MV2_ENABLE_AFFINITY=0' \ - f'export MV2_SHOW_CPU_BINDING=1' + env_vars = f'{env_vars}' \ + f'export MV2_ENABLE_AFFINITY=0\n' \ + f'export MV2_SHOW_CPU_BINDING=1\n' - mpas_netcdf_paths = \ - 'export 
NETCDF=$(dirname $(dirname $(which nc-config)))\n' \ - 'export NETCDFF=$(dirname $(dirname $(which nf-config)))\n' \ - 'export PNETCDF=$(dirname $(dirname $(which pnetcdf-config)))' + env_vars = \ + f'{env_vars}' \ + f'export NETCDF=$(dirname $(dirname $(which nc-config)))\n' \ + f'export NETCDFF=$(dirname $(dirname $(which nf-config)))\n' \ + f'export PNETCDF=$(dirname $(dirname $(which pnetcdf-config)))\n' - sys_info = dict(mpicc=mpicc, mpicxx=mpicxx, mpifc=mpifc, - mpas_netcdf_paths=mpas_netcdf_paths) - - return sys_info, mod_env_commands + return env_vars def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, - activate_env, spack_env, mod_env_commands): + activate_env, spack_env): esmf = config.get('deploy', 'esmf') scorpio = config.get('deploy', 'scorpio') @@ -327,12 +324,12 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, machine=machine, include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf) - return spack_base, spack_script, mod_env_commands + return spack_base, spack_script def write_load_compass(template_path, activ_path, conda_base, env_type, activ_suffix, prefix, env_name, spack_script, machine, - sys_info, mod_env_commands, env_only): + env_vars, env_only): try: os.makedirs(activ_path) @@ -347,16 +344,16 @@ def write_load_compass(template_path, activ_path, conda_base, env_type, script_filename = '{}/{}{}.sh'.format(activ_path, prefix, activ_suffix) if not env_only: - mod_env_commands = f'{mod_env_commands}\n' \ + env_vars = f'{env_vars}\n' \ f'export USE_PIO2=true' - mod_env_commands = f'{mod_env_commands}\n' \ - f'export HDF5_USE_FILE_LOCKING=FALSE\n' \ - f'export LOAD_COMPASS_ENV={script_filename}' + env_vars = f'{env_vars}\n' \ + f'export HDF5_USE_FILE_LOCKING=FALSE\n' \ + f'export LOAD_COMPASS_ENV={script_filename}' if machine is not None: - mod_env_commands = f'{mod_env_commands}\n' \ - f'export COMPASS_MACHINE={machine}' + env_vars = f'{env_vars}\n' \ + f'export COMPASS_MACHINE={machine}' - filename = 
'{}/load_compass.template'.format(template_path) + filename = f'{template_path}/load_compass.template' with open(filename, 'r') as f: template = Template(f.read()) @@ -371,9 +368,8 @@ def write_load_compass(template_path, activ_path, conda_base, env_type, update_compass = '' script = template.render(conda_base=conda_base, compass_env=env_name, - mod_env_commands=mod_env_commands, + env_vars=env_vars, spack=spack_script, - netcdf_paths=sys_info['mpas_netcdf_paths'], update_compass=update_compass) # strip out redundant blank lines @@ -590,14 +586,11 @@ def main(): version = get_version() machine = None - machine_info = None if not args.env_only: if args.machine is None: machine = discover_machine() else: machine = args.machine - if machine is not None: - machine_info = MachineInfo(machine=machine) config = get_config(args.config_file, machine) @@ -631,18 +624,6 @@ def main(): else: compiler = 'gnu' - if machine_info is not None: - mpicc, mpicxx, mpifc, mod_env_commands = \ - get_modules_env_vars_and_mpi_compilers( - machine, compiler, mpi, shell='sh', - include_e3sm_hdf5_netcdf=True) - else: - # using conda-forge compilers - mpicc = 'mpicc' - mpicxx = 'mpicxx' - mpifc = 'mpifort' - mod_env_commands = '' - env_path, env_name, activate_env, spack_env = build_env( env_type, recreate, machine, compiler, mpi, conda_mpi, version, python, source_path, template_path, conda_base, activ_suffix, args.env_name, @@ -650,21 +631,22 @@ def main(): spack_base = None spack_script = '' - sys_info = dict(mpas_netcdf_paths='') if compiler is not None: + env_vars = get_env_vars(machine, compiler, mpi) if machine is not None: - sys_info, mod_env_commands = get_sys_info( - machine, compiler, mpi, mpicc, mpicxx, mpifc, mod_env_commands) - spack_base, spack_script, mod_env_commands = build_spack_env( + spack_base, spack_script = build_spack_env( config, args.update_spack, machine, compiler, mpi, env_name, - activate_env, spack_env, mod_env_commands) + activate_env, spack_env) scorpio_path = 
os.path.join(spack_base, 'var/spack/environments/', spack_env, '.spack-env/view') - mod_env_commands = f'{mod_env_commands}\n' \ - f'export PIO={scorpio_path}' + env_vars = f'{env_vars}' \ + f'export PIO={scorpio_path}\n' else: - mod_env_commands = f'{mod_env_commands}\n' \ - f'export PIO={env_path}' + env_vars = f'{env_vars}' \ + f'export PIO={env_path}\n' + + else: + env_vars = '' if env_type == 'dev': if args.env_name is not None: @@ -678,8 +660,7 @@ def main(): script_filename = write_load_compass( template_path, activ_path, conda_base, env_type, activ_suffix, prefix, - env_name, spack_script, machine, sys_info, mod_env_commands, - args.env_only) + env_name, spack_script, machine, env_vars, args.env_only) if args.check: check_env(script_filename, env_name) diff --git a/conda/compass_env/load_compass.template b/conda/compass_env/load_compass.template index 765c033ec7..5e7d3f23e5 100644 --- a/conda/compass_env/load_compass.template +++ b/conda/compass_env/load_compass.template @@ -5,6 +5,4 @@ conda activate {{ compass_env }} {{ spack }} -{{ mod_env_commands }} - -{{ netcdf_paths }} +{{ env_vars }} \ No newline at end of file From d4593a1921a395a6f5b6268dddd177186e32d358 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 17 Mar 2022 15:10:35 +0100 Subject: [PATCH 09/27] Allow a specified spack base location --- conda/bootstrap.py | 93 ++++++++++++++++++++-------------------------- conda/shared.py | 14 +++++++ 2 files changed, 54 insertions(+), 53 deletions(-) diff --git a/conda/bootstrap.py b/conda/bootstrap.py index e9936563c2..673cf8964a 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -18,7 +18,8 @@ from mache import discover_machine from mache.spack import make_spack_env, get_spack_script from mache.version import __version__ as mache_version -from shared import parse_args, get_conda_base, check_call, install_miniconda +from shared import parse_args, get_conda_base, get_spack_base, check_call, \ + install_miniconda def 
get_config(config_file, machine): @@ -29,8 +30,9 @@ def get_config(config_file, machine): config.read(default_config) if machine is not None: - with path('mache.machines', f'{machine}.cfg') as machine_config: - config.read(str(machine_config)) + if not machine.startswith('conda'): + with path('mache.machines', f'{machine}.cfg') as machine_config: + config.read(str(machine_config)) machine_config = os.path.join(here, '..', 'compass', 'machines', '{}.cfg'.format(machine)) @@ -82,20 +84,18 @@ def get_env_setup(args, config, machine, env_type, source_path, conda_base): else: mpi = config.get('deploy', 'mpi_{}'.format(compiler)) - if machine is not None: + if machine is None: + conda_mpi = None + activ_suffix = '' + env_suffix = '' + elif not machine.startswith('conda'): conda_mpi = 'nompi' - else: - conda_mpi = mpi - - if machine is not None: activ_suffix = '_{}_{}_{}'.format(machine, compiler, mpi) env_suffix = '' - elif conda_mpi != 'nompi': + else: activ_suffix = '_{}'.format(mpi) env_suffix = activ_suffix - else: - activ_suffix = '' - env_suffix = '' + conda_mpi = mpi if env_type == 'dev': activ_path = source_path @@ -264,7 +264,7 @@ def get_env_vars(machine, compiler, mpilib): def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, - activate_env, spack_env): + activate_env, spack_env, spack_base): esmf = config.get('deploy', 'esmf') scorpio = config.get('deploy', 'scorpio') @@ -280,8 +280,7 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, # it could be that esmf was already removed pass - spack_base = config.get('deploy', 'spack') - spack_base = f'{spack_base}/spack_for_mache_{mache_version}' + spack_branch_base = f'{spack_base}/spack_for_mache_{mache_version}' specs = list() @@ -306,25 +305,25 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, specs.append(f'albany@{albany}') if update_spack: - make_spack_env(spack_path=spack_base, env_name=spack_env, + 
make_spack_env(spack_path=spack_branch_base, env_name=spack_env, spack_specs=specs, compiler=compiler, mpi=mpi, machine=machine, include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf) # remove ESMC/ESMF include files that interfere with MPAS time keeping - include_path = os.path.join(spack_base, 'var/spack/environments/', - spack_env, '.spack-env/view/include') + include_path = f'{spack_branch_base}/var/spack/environments/' \ + f'{spack_env}/.spack-env/view/include' for prefix in ['ESMC', 'esmf']: files = glob.glob(os.path.join(include_path, f'{prefix}*')) for filename in files: os.remove(filename) - spack_script = get_spack_script(spack_path=spack_base, env_name=spack_env, + spack_script = get_spack_script(spack_path=spack_branch_base, env_name=spack_env, compiler=compiler, mpi=mpi, shell='sh', machine=machine, include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf) - return spack_base, spack_script + return spack_branch_base, spack_script def write_load_compass(template_path, activ_path, conda_base, env_type, @@ -426,6 +425,11 @@ def test_command(command, env, package): def update_permissions(config, env_type, activ_path, conda_base, spack_base): + if not config.has_option('e3sm_unified', 'group'): + return + + group = config.get('e3sm_unified', 'group') + directories = [] if env_type != 'dev': directories.append(conda_base) @@ -434,8 +438,6 @@ def update_permissions(config, env_type, activ_path, conda_base, spack_base): # shared system libraries directories.append(spack_base) - group = config.get('e3sm_unified', 'group') - new_uid = os.getuid() new_gid = grp.getgrnam(group).gr_gid @@ -592,6 +594,12 @@ def main(): else: machine = args.machine + if machine is None and not args.env_only: + if platform.system() == 'Linux': + machine = 'conda-linux' + elif platform.system() == 'Darwin': + machine = 'conda-osx' + config = get_config(args.config_file, machine) env_type = config.get('deploy', 'env_type') @@ -599,6 +607,7 @@ def main(): raise ValueError(f'Unexpected env_type: {env_type}') shared 
= (env_type != 'dev') conda_base = get_conda_base(args.conda_base, config, shared=shared) + spack_base = get_spack_base(args.spack_base, config) base_activation_script = os.path.abspath( '{}/etc/profile.d/conda.sh'.format(conda_base)) @@ -609,42 +618,21 @@ def main(): activ_path = get_env_setup(args, config, machine, env_type, source_path, conda_base) - if machine is None and not args.env_only and args.mpi is None: - raise ValueError('Your machine wasn\'t recognized by compass but you ' - 'didn\'t specify the MPI version. Please provide ' - 'either the --mpi or --env_only flag.') - - if machine is None: - if args.env_only: - compiler = None - elif platform.system() == 'Linux': - compiler = 'gnu' - elif platform.system() == 'Darwin': - compiler = 'clang' - else: - compiler = 'gnu' - env_path, env_name, activate_env, spack_env = build_env( env_type, recreate, machine, compiler, mpi, conda_mpi, version, python, source_path, template_path, conda_base, activ_suffix, args.env_name, env_suffix, activate_base, args.use_local, args.local_conda_build) - spack_base = None spack_script = '' if compiler is not None: env_vars = get_env_vars(machine, compiler, mpi) - if machine is not None: - spack_base, spack_script = build_spack_env( - config, args.update_spack, machine, compiler, mpi, env_name, - activate_env, spack_env) - scorpio_path = os.path.join(spack_base, 'var/spack/environments/', - spack_env, '.spack-env/view') - env_vars = f'{env_vars}' \ - f'export PIO={scorpio_path}\n' - else: - env_vars = f'{env_vars}' \ - f'export PIO={env_path}\n' - + spack_branch_base, spack_script = build_spack_env( + config, args.update_spack, machine, compiler, mpi, env_name, + activate_env, spack_env, spack_base) + scorpio_path = f'{spack_branch_base}/var/spack/environments/' \ + f'{spack_env}.spack-env/view' + env_vars = f'{env_vars}' \ + f'export PIO={scorpio_path}\n' else: env_vars = '' @@ -681,9 +669,8 @@ def main(): commands = '{}; conda clean -y -p -t'.format(activate_base) 
check_call(commands) - if machine is not None: - update_permissions(config, env_type, activ_path, conda_base, - spack_base) + update_permissions(config, env_type, activ_path, conda_base, + spack_base) if __name__ == '__main__': diff --git a/conda/shared.py b/conda/shared.py index aee5f74f69..a23d5060d7 100644 --- a/conda/shared.py +++ b/conda/shared.py @@ -21,6 +21,8 @@ def parse_args(bootstrap): "related config options") parser.add_argument("--conda", dest="conda_base", help="Path to the conda base") + parser.add_argument("--spack", dest="spack_base", + help="Path to the spack base") parser.add_argument("--env_name", dest="env_name", help="The conda environment name and activation script" " prefix") @@ -79,6 +81,18 @@ def get_conda_base(conda_base, config, shared=False): return conda_base +def get_spack_base(spack_base, config): + if spack_base is None: + if config.has_option('deploy', 'spack'): + spack_base = config.get('deploy', 'spack') + else: + raise ValueError('No spack base provided with --spack and none is ' + 'provided in a config file.') + # handle "~" in the path + spack_base = os.path.abspath(os.path.expanduser(spack_base)) + return spack_base + + def check_call(commands, env=None): print('running: {}'.format(commands)) proc = subprocess.Popen(commands, env=env, executable='/bin/bash', From 3e81f0b8d7e661880b14209f70a4b391ae4189dc Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 17 Mar 2022 19:36:33 +0100 Subject: [PATCH 10/27] Add mpas build flag to albany spack spec --- conda/bootstrap.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda/bootstrap.py b/conda/bootstrap.py index 673cf8964a..967099f6d1 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -302,7 +302,7 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, specs.append(f'scorpio@{scorpio}+pnetcdf~timing+internal-timing~tools+malloc') if albany != 'None': - specs.append(f'albany@{albany}') + 
specs.append(f'albany@{albany}+mpas') if update_spack: make_spack_env(spack_path=spack_branch_base, env_name=spack_env, From 205681c17a35458bf0113127ce0684528313f820 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 18 Mar 2022 11:01:53 +0100 Subject: [PATCH 11/27] Support custom yaml templates for spack env This allows us to use modules other than the E3SM defaults where needed. This merge also adds such a custom template for building with gnu and mvapich on Badger. --- conda/bootstrap.py | 37 +++++----- conda/spack/badger_gnu_mvapich.yaml | 101 ++++++++++++++++++++++++++++ 2 files changed, 123 insertions(+), 15 deletions(-) create mode 100644 conda/spack/badger_gnu_mvapich.yaml diff --git a/conda/bootstrap.py b/conda/bootstrap.py index 967099f6d1..71be689b2c 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -109,8 +109,8 @@ def get_env_setup(args, config, machine, env_type, source_path, conda_base): def build_env(env_type, recreate, machine, compiler, mpi, conda_mpi, version, - python, source_path, template_path, conda_base, activ_suffix, - env_name, env_suffix, activate_base, use_local, + python, source_path, conda_template_path, conda_base, + activ_suffix, env_name, env_suffix, activate_base, use_local, local_conda_build): if env_type != 'dev': @@ -174,7 +174,7 @@ def build_env(env_type, recreate, machine, compiler, mpi, conda_mpi, version, activate_env = \ f'source {base_activation_script}; conda activate {env_name}' - with open(f'{template_path}/spec-file.template', 'r') as f: + with open(f'{conda_template_path}/spec-file.template', 'r') as f: template = Template(f.read()) if env_type == 'dev': @@ -264,7 +264,7 @@ def get_env_vars(machine, compiler, mpilib): def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, - activate_env, spack_env, spack_base): + activate_env, spack_env, spack_base, spack_template_path): esmf = config.get('deploy', 'esmf') scorpio = config.get('deploy', 'scorpio') @@ -304,11 +304,15 @@ def 
build_spack_env(config, update_spack, machine, compiler, mpi, env_name, if albany != 'None': specs.append(f'albany@{albany}+mpas') + yaml_template = f'{spack_template_path}/{machine}_{compiler}_{mpi}.yaml' + if not os.path.exists(yaml_template): + yaml_template = None if update_spack: make_spack_env(spack_path=spack_branch_base, env_name=spack_env, spack_specs=specs, compiler=compiler, mpi=mpi, machine=machine, - include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf) + include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf, + yaml_template=yaml_template) # remove ESMC/ESMF include files that interfere with MPAS time keeping include_path = f'{spack_branch_base}/var/spack/environments/' \ @@ -318,10 +322,11 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, for filename in files: os.remove(filename) - spack_script = get_spack_script(spack_path=spack_branch_base, env_name=spack_env, - compiler=compiler, mpi=mpi, shell='sh', - machine=machine, - include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf) + spack_script = get_spack_script( + spack_path=spack_branch_base, env_name=spack_env, compiler=compiler, + mpi=mpi, shell='sh', machine=machine, + include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf, + yaml_template=yaml_template) return spack_branch_base, spack_script @@ -583,7 +588,8 @@ def main(): args = parse_args(bootstrap=True) source_path = os.getcwd() - template_path = '{}/conda/compass_env'.format(source_path) + conda_template_path = f'{source_path}/conda/compass_env' + spack_template_path = f'{source_path}/conda/spack' version = get_version() @@ -620,15 +626,16 @@ def main(): env_path, env_name, activate_env, spack_env = build_env( env_type, recreate, machine, compiler, mpi, conda_mpi, version, python, - source_path, template_path, conda_base, activ_suffix, args.env_name, - env_suffix, activate_base, args.use_local, args.local_conda_build) + source_path, conda_template_path, conda_base, activ_suffix, + args.env_name, env_suffix, activate_base, args.use_local, + 
args.local_conda_build) spack_script = '' if compiler is not None: env_vars = get_env_vars(machine, compiler, mpi) spack_branch_base, spack_script = build_spack_env( config, args.update_spack, machine, compiler, mpi, env_name, - activate_env, spack_env, spack_base) + activate_env, spack_env, spack_base, spack_template_path) scorpio_path = f'{spack_branch_base}/var/spack/environments/' \ f'{spack_env}.spack-env/view' env_vars = f'{env_vars}' \ @@ -647,8 +654,8 @@ def main(): prefix = 'load_compass_{}'.format(version) script_filename = write_load_compass( - template_path, activ_path, conda_base, env_type, activ_suffix, prefix, - env_name, spack_script, machine, env_vars, args.env_only) + conda_template_path, activ_path, conda_base, env_type, activ_suffix, + prefix, env_name, spack_script, machine, env_vars, args.env_only) if args.check: check_env(script_filename, env_name) diff --git a/conda/spack/badger_gnu_mvapich.yaml b/conda/spack/badger_gnu_mvapich.yaml new file mode 100644 index 0000000000..5d436c7509 --- /dev/null +++ b/conda/spack/badger_gnu_mvapich.yaml @@ -0,0 +1,101 @@ +spack: + specs: + - cmake + - gcc + - mvapich2 + - intel-mkl + - hdf5 +{{ specs }} + concretization: together + packages: + all: + compiler: [gcc@9.3.0] + providers: + mpi: [mvapich2@2.3] + lapack: [intel-mkl@2020.0.4] + bzip2: + externals: + - spec: bzip2@1.0.6 + prefix: /usr + buildable: false + curl: + externals: + - spec: curl@7.29.0 + prefix: /usr + buildable: false + gettext: + externals: + - spec: gettext@0.19.8.1 + prefix: /usr + buildable: false + openssl: + externals: + - spec: openssl@1.0.2k + prefix: /usr + buildable: false + perl: + externals: + - spec: perl@5.16.3 + prefix: /usr + buildable: false + tar: + externals: + - spec: tar@1.26 + prefix: /usr + buildable: false + xz: + externals: + - spec: xz@5.2.2 + prefix: /usr + buildable: false + cmake: + externals: + - spec: cmake@3.19.2 + prefix: /usr/projects/hpcsoft/toss3/common/x86_64/cmake/3.19.2 + modules: + - cmake/3.19.2 + 
buildable: false + gcc: + externals: + - spec: gcc@9.3.0 + prefix: /usr/projects/hpcsoft/toss3/common/x86_64/gcc/9.3.0 + modules: + - gcc/9.3.0 + buildable: false + mvapich2: + externals: + - spec: mvapich2@2.3 + prefix: /usr/projects/hpcsoft/toss3/badger/mvapich2/2.3_gcc-9.3.0 + modules: + - mvapich2/2.3 + buildable: false + intel-mkl: + externals: + - spec: intel-mkl@2020.0.4 + prefix: /usr/projects/hpcsoft/toss3/common/x86_64/intel-clusterstudio/2020.4.912/compilers_and_libraries_2020 + modules: + - mkl/2020.0.4 + buildable: false + hdf5: + externals: + - spec: hdf5@1.10.7~cxx+fortran+hl~java+mpi+shared + prefix: /usr/projects/hpcsoft/toss3/badger/hdf5/1.10.7_gcc-9.3.0_mvapich2-2.3 + modules: + - hdf5-parallel/1.10.7 + buildable: false + config: + install_missing_compilers: false + compilers: + - compiler: + spec: gcc@9.3.0 + paths: + cc: /usr/projects/hpcsoft/toss3/common/x86_64/gcc/9.3.0/bin/gcc + cxx: /usr/projects/hpcsoft/toss3/common/x86_64/gcc/9.3.0/bin/g++ + f77: /usr/projects/hpcsoft/toss3/common/x86_64/gcc/9.3.0/bin/gfortran + fc: /usr/projects/hpcsoft/toss3/common/x86_64/gcc/9.3.0/bin/gfortran + flags: {} + operating_system: rhel7 + target: x86_64 + modules: [] + environment: {} + extra_rpaths: [] From 25fb8ac6c921d9326b82c340ad7511a127df7462 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 18 Mar 2022 15:56:41 +0100 Subject: [PATCH 12/27] Add Albany env var to compass activation script If the spack environment include Albany, the libraries needed to link in Albany will be added to the environment variable `MPAS_EXTERNAL_LIBS`. 
--- conda/bootstrap.py | 38 +++++++++++++++++++++++++++----------- 1 file changed, 27 insertions(+), 11 deletions(-) diff --git a/conda/bootstrap.py b/conda/bootstrap.py index 71be689b2c..dcaad55307 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -236,7 +236,7 @@ def get_env_vars(machine, compiler, mpilib): machine = 'None' # convert env vars from mache to a list - env_vars = '' + env_vars = 'export MPAS_EXTERNAL_LIBS=""\n' if 'intel' in compiler and machine == 'anvil': env_vars = f'{env_vars}' \ @@ -246,8 +246,9 @@ def get_env_vars(machine, compiler, mpilib): f'export I_MPI_F90=ifort\n' if platform.system() == 'Linux' and machine == 'None': - env_vars = f'{env_vars}' \ - f'export MPAS_EXTERNAL_LIBS="-lgomp"\n' + env_vars = \ + f'{env_vars}' \ + f'export MPAS_EXTERNAL_LIBS="${{MPAS_EXTERNAL_LIBS}} -lgomp"\n' if mpilib == 'mvapich': env_vars = f'{env_vars}' \ @@ -264,7 +265,8 @@ def get_env_vars(machine, compiler, mpilib): def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, - activate_env, spack_env, spack_base, spack_template_path): + activate_env, spack_env, spack_base, spack_template_path, + env_vars): esmf = config.get('deploy', 'esmf') scorpio = config.get('deploy', 'scorpio') @@ -328,7 +330,25 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf, yaml_template=yaml_template) - return spack_branch_base, spack_script + spack_view = f'{spack_branch_base}/var/spack/environments/' \ + f'{spack_env}/.spack-env/view' + env_vars = f'{env_vars}' \ + f'export PIO={spack_view}\n' + if albany != 'None': + albany_flag_filename = f'{spack_view}/export_albany.in' + with open(albany_flag_filename, 'r') as f: + albany_flags = f.read() + if platform.system() == 'Darwin': + stdcxx = '-lc++' + else: + stdcxx = '-lstdc++' + env_vars = \ + f'{env_vars}' \ + f'export {albany_flags}\n' \ + f'export MPAS_EXTERNAL_LIBS="${{MPAS_EXTERNAL_LIBS}} ' \ + f'${{ALBANY_LINK_LIBS}} 
{stdcxx}"\n' + + return spack_branch_base, spack_script, env_vars def write_load_compass(template_path, activ_path, conda_base, env_type, @@ -633,13 +653,9 @@ def main(): spack_script = '' if compiler is not None: env_vars = get_env_vars(machine, compiler, mpi) - spack_branch_base, spack_script = build_spack_env( + spack_branch_base, spack_script, env_vars = build_spack_env( config, args.update_spack, machine, compiler, mpi, env_name, - activate_env, spack_env, spack_base, spack_template_path) - scorpio_path = f'{spack_branch_base}/var/spack/environments/' \ - f'{spack_env}.spack-env/view' - env_vars = f'{env_vars}' \ - f'export PIO={scorpio_path}\n' + activate_env, spack_env, spack_base, spack_template_path, env_vars) else: env_vars = '' From 77e6b40a3803d2a4eff77b833ac1aa8c118e8696 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 22 Mar 2022 10:54:11 +0100 Subject: [PATCH 13/27] Add tmpdir option for spack Spack is running out of space on `/tmp` on some machines (e.g. Compy) and needs a different temp. directory. 
--- conda/bootstrap.py | 7 ++++--- conda/shared.py | 3 +++ 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/conda/bootstrap.py b/conda/bootstrap.py index dcaad55307..2af56a46bd 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -266,7 +266,7 @@ def get_env_vars(machine, compiler, mpilib): def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, activate_env, spack_env, spack_base, spack_template_path, - env_vars): + env_vars, tmpdir): esmf = config.get('deploy', 'esmf') scorpio = config.get('deploy', 'scorpio') @@ -314,7 +314,7 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, spack_specs=specs, compiler=compiler, mpi=mpi, machine=machine, include_e3sm_hdf5_netcdf=e3sm_hdf5_netcdf, - yaml_template=yaml_template) + yaml_template=yaml_template, tmpdir=tmpdir) # remove ESMC/ESMF include files that interfere with MPAS time keeping include_path = f'{spack_branch_base}/var/spack/environments/' \ @@ -655,7 +655,8 @@ def main(): env_vars = get_env_vars(machine, compiler, mpi) spack_branch_base, spack_script, env_vars = build_spack_env( config, args.update_spack, machine, compiler, mpi, env_name, - activate_env, spack_env, spack_base, spack_template_path, env_vars) + activate_env, spack_env, spack_base, spack_template_path, env_vars, + args.tmpdir) else: env_vars = '' diff --git a/conda/shared.py b/conda/shared.py index a23d5060d7..055d6013f2 100644 --- a/conda/shared.py +++ b/conda/shared.py @@ -50,6 +50,9 @@ def parse_args(bootstrap): action='store_true', help="If the shared spack environment should be " "created or recreated.") + parser.add_argument("--tmpdir", dest="tmpdir", + help="A temporary directory for building spack " + "packages") if bootstrap: parser.add_argument("--local_conda_build", dest="local_conda_build", type=str, From b8c8a7bbba20d284f8f2f070cf8c720b460e7b1b Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 23 Mar 2022 22:00:22 +0100 Subject: [PATCH 14/27] Add a flag to 
include albany

We want to exclude it by default because it adds a lot of build overhead.
---
 conda/bootstrap.py | 8 ++++++++
 conda/shared.py    | 4 ++++
 2 files changed, 12 insertions(+)

diff --git a/conda/bootstrap.py b/conda/bootstrap.py
index 2af56a46bd..bf2e5659b2 100755
--- a/conda/bootstrap.py
+++ b/conda/bootstrap.py
@@ -336,6 +336,10 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_name,
                f'export PIO={spack_view}\n'
     if albany != 'None':
         albany_flag_filename = f'{spack_view}/export_albany.in'
+        if not os.path.exists(albany_flag_filename):
+            raise ValueError(f'Missing Albany linking flags in '
+                             f'{albany_flag_filename}.\n Maybe your Spack '
+                             f'environment needs to be rebuilt with Albany?')
         with open(albany_flag_filename, 'r') as f:
             albany_flags = f.read()
         if platform.system() == 'Darwin':
@@ -611,6 +615,7 @@ def main():
     conda_template_path = f'{source_path}/conda/compass_env'
     spack_template_path = f'{source_path}/conda/spack'
 
+
     version = get_version()
 
     machine = None
@@ -650,6 +655,9 @@ def main():
         args.env_name, env_suffix, activate_base, args.use_local,
         args.local_conda_build)
 
+    if not args.with_albany:
+        config.set('deploy', 'albany', 'None')
+
     spack_script = ''
     if compiler is not None:
         env_vars = get_env_vars(machine, compiler, mpi)
diff --git a/conda/shared.py b/conda/shared.py
index 055d6013f2..09e913bc81 100644
--- a/conda/shared.py
+++ b/conda/shared.py
@@ -53,6 +53,10 @@ def parse_args(bootstrap):
     parser.add_argument("--tmpdir", dest="tmpdir",
                         help="A temporary directory for building spack "
                              "packages")
+    parser.add_argument("--with_albany", dest="with_albany",
+                        action='store_true',
+                        help="Whether to include albany in the spack "
+                             "environment")
     if bootstrap:
         parser.add_argument("--local_conda_build", dest="local_conda_build",
                             type=str,

From 9d090427aa3c391475bddd1d7a43c7b4cf2ee8cb Mon Sep 17 00:00:00 2001
From: Xylar Asay-Davis
Date: Thu, 24 Mar 2022 16:54:04 +0100
Subject: [PATCH 15/27] Update esmf to 8.2.0

---
conda/default.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/conda/default.cfg b/conda/default.cfg index c2f45e681f..a20e0540b3 100644 --- a/conda/default.cfg +++ b/conda/default.cfg @@ -20,10 +20,10 @@ python = 3.9 mpi = nompi # the version of various packages to include if using spack -esmf = 8.1.1 +esmf = 8.2.0 hdf5 = 1.12.1 netcdf_c = 4.8.1 netcdf_fortran = 4.5.3 pnetcdf = 1.12.2 scorpio = 1.2.2 -albany = develop \ No newline at end of file +albany = develop From 3971516362ad4b47a1de1b5d3e5215511c96b6d9 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 25 Mar 2022 07:54:12 +0100 Subject: [PATCH 16/27] Add gnu/openmpi build on Badger --- conda/spack/badger_gnu_openmpi.yaml | 101 ++++++++++++++++++++++++++++ conda/unsupported.txt | 1 - 2 files changed, 101 insertions(+), 1 deletion(-) create mode 100644 conda/spack/badger_gnu_openmpi.yaml diff --git a/conda/spack/badger_gnu_openmpi.yaml b/conda/spack/badger_gnu_openmpi.yaml new file mode 100644 index 0000000000..ff24dd75e6 --- /dev/null +++ b/conda/spack/badger_gnu_openmpi.yaml @@ -0,0 +1,101 @@ +spack: + specs: + - cmake + - gcc + - openmpi + - intel-mkl + - hdf5 +{{ specs }} + concretization: together + packages: + all: + compiler: [gcc@9.3.0] + providers: + mpi: [openmpi@3.1.6] + lapack: [intel-mkl@2020.0.4] + bzip2: + externals: + - spec: bzip2@1.0.6 + prefix: /usr + buildable: false + curl: + externals: + - spec: curl@7.29.0 + prefix: /usr + buildable: false + gettext: + externals: + - spec: gettext@0.19.8.1 + prefix: /usr + buildable: false + openssl: + externals: + - spec: openssl@1.0.2k + prefix: /usr + buildable: false + perl: + externals: + - spec: perl@5.16.3 + prefix: /usr + buildable: false + tar: + externals: + - spec: tar@1.26 + prefix: /usr + buildable: false + xz: + externals: + - spec: xz@5.2.2 + prefix: /usr + buildable: false + cmake: + externals: + - spec: cmake@3.19.2 + prefix: /usr/projects/hpcsoft/toss3/common/x86_64/cmake/3.19.2 + modules: + - 
cmake/3.19.2 + buildable: false + gcc: + externals: + - spec: gcc@9.3.0 + prefix: /usr/projects/hpcsoft/toss3/common/x86_64/gcc/9.3.0 + modules: + - gcc/9.3.0 + buildable: false + openmpi: + externals: + - spec: openmpi@3.1.6 + prefix: /usr/projects/hpcsoft/toss3/badger/openmpi/3.1.6-gcc-9.3.0 + modules: + - openmpi/3.1.6 + buildable: false + intel-mkl: + externals: + - spec: intel-mkl@2020.0.4 + prefix: /usr/projects/hpcsoft/toss3/common/x86_64/intel-clusterstudio/2020.4.912/compilers_and_libraries_2020 + modules: + - mkl/2020.0.4 + buildable: false + hdf5: + externals: + - spec: hdf5@1.10.7~cxx+fortran+hl~java+mpi+shared + prefix: /usr/projects/hpcsoft/toss3/badger/hdf5/1.10.7_gcc-9.3.0_openmpi-3.1.6 + modules: + - hdf5-parallel/1.10.7 + buildable: false + config: + install_missing_compilers: false + compilers: + - compiler: + spec: gcc@9.3.0 + paths: + cc: /usr/projects/hpcsoft/toss3/common/x86_64/gcc/9.3.0/bin/gcc + cxx: /usr/projects/hpcsoft/toss3/common/x86_64/gcc/9.3.0/bin/g++ + f77: /usr/projects/hpcsoft/toss3/common/x86_64/gcc/9.3.0/bin/gfortran + fc: /usr/projects/hpcsoft/toss3/common/x86_64/gcc/9.3.0/bin/gfortran + flags: {} + operating_system: rhel7 + target: x86_64 + modules: [] + environment: {} + extra_rpaths: [] diff --git a/conda/unsupported.txt b/conda/unsupported.txt index 7b9e027cfb..2ede419232 100644 --- a/conda/unsupported.txt +++ b/conda/unsupported.txt @@ -13,5 +13,4 @@ grizzly, intel, openmpi cori-knl, gnu, mpt # compile but hang -badger, gnu, openmpi badger, intel, openmpi From 01022a2612d5ecb97a6833aa643191ef45a82d56 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 25 Mar 2022 11:47:27 +0100 Subject: [PATCH 17/27] Don't use E3SM HDF5 and NetCDF modules on Badger This gives us a lot more freedom to explore compiler and MPI library combinations. 
--- compass/machines/badger.cfg | 2 +- conda/spack/badger_gnu_mvapich.yaml | 8 -------- conda/spack/badger_gnu_openmpi.yaml | 8 -------- 3 files changed, 1 insertion(+), 17 deletions(-) diff --git a/compass/machines/badger.cfg b/compass/machines/badger.cfg index 94b93e91c9..068f6ec146 100644 --- a/compass/machines/badger.cfg +++ b/compass/machines/badger.cfg @@ -34,4 +34,4 @@ spack = /usr/projects/climate/SHARED_CLIMATE/compass/badger/spack # whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and # pnetcdf as E3SM (spack modules are used otherwise) -use_e3sm_hdf5_netcdf = True +use_e3sm_hdf5_netcdf = False diff --git a/conda/spack/badger_gnu_mvapich.yaml b/conda/spack/badger_gnu_mvapich.yaml index 5d436c7509..b07caf6fd9 100644 --- a/conda/spack/badger_gnu_mvapich.yaml +++ b/conda/spack/badger_gnu_mvapich.yaml @@ -4,7 +4,6 @@ spack: - gcc - mvapich2 - intel-mkl - - hdf5 {{ specs }} concretization: together packages: @@ -76,13 +75,6 @@ spack: modules: - mkl/2020.0.4 buildable: false - hdf5: - externals: - - spec: hdf5@1.10.7~cxx+fortran+hl~java+mpi+shared - prefix: /usr/projects/hpcsoft/toss3/badger/hdf5/1.10.7_gcc-9.3.0_mvapich2-2.3 - modules: - - hdf5-parallel/1.10.7 - buildable: false config: install_missing_compilers: false compilers: diff --git a/conda/spack/badger_gnu_openmpi.yaml b/conda/spack/badger_gnu_openmpi.yaml index ff24dd75e6..33c975f2a6 100644 --- a/conda/spack/badger_gnu_openmpi.yaml +++ b/conda/spack/badger_gnu_openmpi.yaml @@ -4,7 +4,6 @@ spack: - gcc - openmpi - intel-mkl - - hdf5 {{ specs }} concretization: together packages: @@ -76,13 +75,6 @@ spack: modules: - mkl/2020.0.4 buildable: false - hdf5: - externals: - - spec: hdf5@1.10.7~cxx+fortran+hl~java+mpi+shared - prefix: /usr/projects/hpcsoft/toss3/badger/hdf5/1.10.7_gcc-9.3.0_openmpi-3.1.6 - modules: - - hdf5-parallel/1.10.7 - buildable: false config: install_missing_compilers: false compilers: From e7e8e8ee862631699c080026f03a8334f10c7430 Mon Sep 17 00:00:00 2001 From: Xylar 
Asay-Davis Date: Mon, 4 Apr 2022 08:27:59 -0600 Subject: [PATCH 18/27] Take gnu and openmpi out again on Badger It's not working still... --- conda/unsupported.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/conda/unsupported.txt b/conda/unsupported.txt index 2ede419232..31533b5da4 100644 --- a/conda/unsupported.txt +++ b/conda/unsupported.txt @@ -14,3 +14,6 @@ cori-knl, gnu, mpt # compile but hang badger, intel, openmpi + +# compiles but error: Listed fewer edges than expected. +badger, gnu, openmpi From 1fcd4b7c8aded0bebb62a47675daacb1deceda2c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 5 Apr 2022 06:28:22 -0600 Subject: [PATCH 19/27] Use system libxml2 for Badger --- conda/spack/badger_gnu_mvapich.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/conda/spack/badger_gnu_mvapich.yaml b/conda/spack/badger_gnu_mvapich.yaml index b07caf6fd9..dcf5ea0537 100644 --- a/conda/spack/badger_gnu_mvapich.yaml +++ b/conda/spack/badger_gnu_mvapich.yaml @@ -27,6 +27,11 @@ spack: - spec: gettext@0.19.8.1 prefix: /usr buildable: false + libxml2: + externals: + - spec: libxml2@2.9.1 + prefix: /usr + buildable: false openssl: externals: - spec: openssl@1.0.2k From ce6885f4377b9d49781a59ef8b4344113699dccf Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 5 Apr 2022 21:22:50 +0200 Subject: [PATCH 20/27] Add a check for albany support A file lists the machines, compilers and MPI libraries that work with Albany. When a developer creates a compass environment with a given set and using the `--with_albany` flag, an error will be raised if the configuration is not supported. 
--- conda/albany_supported.txt | 7 +++++++ conda/bootstrap.py | 38 +++++++++++++++++++++++++++++++------- 2 files changed, 38 insertions(+), 7 deletions(-) create mode 100644 conda/albany_supported.txt diff --git a/conda/albany_supported.txt b/conda/albany_supported.txt new file mode 100644 index 0000000000..5cba96521c --- /dev/null +++ b/conda/albany_supported.txt @@ -0,0 +1,7 @@ +# a list of supported machine, compiler and mpi combinations for Albany + +anvil, gnu, mvapich +anvil, gnu, openmpi +badger, gnu, mvapich +chrysalis, gnu, openmpi +cori-haswell, gnu, mpt diff --git a/conda/bootstrap.py b/conda/bootstrap.py index bf2e5659b2..9217b5c375 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -58,7 +58,8 @@ def get_version(): return version -def get_env_setup(args, config, machine, env_type, source_path, conda_base): +def get_env_setup(args, config, machine, env_type, source_path, conda_base, + with_albany): if args.python is not None: python = args.python @@ -104,6 +105,9 @@ def get_env_setup(args, config, machine, env_type, source_path, conda_base): check_unsupported(machine, compiler, mpi, source_path) + if with_albany: + check_albany_supported(machine, compiler, mpi, source_path) + return python, recreate, compiler, mpi, conda_mpi, activ_suffix, \ env_suffix, activ_path @@ -595,17 +599,38 @@ def update_permissions(config, env_type, activ_path, conda_base, spack_base): def check_unsupported(machine, compiler, mpi, source_path): with open(os.path.join(source_path, 'conda', 'unsupported.txt'), 'r') as f: content = f.readlines() - content = [line.strip() for line in content if not line.startswith('#')] + content = [line.strip() for line in content if not + line.strip().startswith('#')] for line in content: if line.strip() == '': continue unsupported = [part.strip() for part in line.split(',')] if len(unsupported) != 3: - raise ValueError('Bad line in "unsupported.txt" {}'.format(line)) + raise ValueError(f'Bad line in "unsupported.txt" {line}') if 
machine == unsupported[0] and compiler == unsupported[1] and \ mpi == unsupported[2]: - raise ValueError('{} with {} is not supported on {}'.format( - compiler, mpi, machine)) + raise ValueError(f'{compiler} with {mpi} is not supported on ' + f'{machine}') + + +def check_albany_supported(machine, compiler, mpi, source_path): + filename = os.path.join(source_path, 'conda', 'albany_supported.txt') + with open(filename, 'r') as f: + content = f.readlines() + content = [line.strip() for line in content if not + line.strip().startswith('#')] + for line in content: + if line.strip() == '': + continue + supported = [part.strip() for part in line.split(',')] + if len(supported) != 3: + raise ValueError(f'Bad line in "albany_supported.txt" {line}') + if machine == supported[0] and compiler == supported[1] and \ + mpi == supported[2]: + return + + raise ValueError(f'{compiler} with {mpi} is not supported with Albany on ' + f'{machine}') def main(): @@ -615,7 +640,6 @@ def main(): conda_template_path = f'{source_path}/conda/compass_env' spack_template_path = f'{source_path}/conda/spack' - version = get_version() machine = None @@ -647,7 +671,7 @@ def main(): python, recreate, compiler, mpi, conda_mpi, activ_suffix, env_suffix, \ activ_path = get_env_setup(args, config, machine, env_type, - source_path, conda_base) + source_path, conda_base, args.with_albany) env_path, env_name, activate_env, spack_env = build_env( env_type, recreate, machine, compiler, mpi, conda_mpi, version, python, From 8d2a1f55718aabb10b3261e1b98dfe214a705dfe Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 6 Apr 2022 03:48:35 -0500 Subject: [PATCH 21/27] Sometimes add -lmpi_cxx to MALI linking This is currently only needed on Anvil and Chrysalis with Albany and OpenMPI. 
--- conda/bootstrap.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/conda/bootstrap.py b/conda/bootstrap.py index 9217b5c375..6c98f069a2 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -350,11 +350,15 @@ def build_spack_env(config, update_spack, machine, compiler, mpi, env_name, stdcxx = '-lc++' else: stdcxx = '-lstdc++' + if mpi == 'openmpi' and machine in ['anvil', 'chrysalis']: + mpicxx = '-lmpi_cxx' + else: + mpicxx = '' env_vars = \ f'{env_vars}' \ f'export {albany_flags}\n' \ f'export MPAS_EXTERNAL_LIBS="${{MPAS_EXTERNAL_LIBS}} ' \ - f'${{ALBANY_LINK_LIBS}} {stdcxx}"\n' + f'${{ALBANY_LINK_LIBS}} {stdcxx} {mpicxx}"\n' return spack_branch_base, spack_script, env_vars @@ -632,7 +636,6 @@ def check_albany_supported(machine, compiler, mpi, source_path): raise ValueError(f'{compiler} with {mpi} is not supported with Albany on ' f'{machine}') - def main(): args = parse_args(bootstrap=True) From 160b0ca5a313f062ff767b1cdfedae73a3b32c96 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 7 Apr 2022 12:23:54 +0200 Subject: [PATCH 22/27] Update deployment for unknown linux and osx --- compass/machines/conda-linux.cfg | 9 +++++++++ compass/machines/conda-osx.cfg | 9 +++++++++ conda/bootstrap.py | 31 ++++++++++++++++++++++--------- 3 files changed, 40 insertions(+), 9 deletions(-) create mode 100644 compass/machines/conda-linux.cfg create mode 100644 compass/machines/conda-osx.cfg diff --git a/compass/machines/conda-linux.cfg b/compass/machines/conda-linux.cfg new file mode 100644 index 0000000000..b86800d427 --- /dev/null +++ b/compass/machines/conda-linux.cfg @@ -0,0 +1,9 @@ +# Options related to deploying a compass conda environment on supported +# machines +[deploy] + +# the compiler set to use for system libraries and MPAS builds +compiler = gnu + +# the system MPI library to use for gnu compiler +mpi_gnu = mpich diff --git a/compass/machines/conda-osx.cfg b/compass/machines/conda-osx.cfg new file mode 100644 index 
0000000000..65b91c7583 --- /dev/null +++ b/compass/machines/conda-osx.cfg @@ -0,0 +1,9 @@ +# Options related to deploying a compass conda environment on supported +# machines +[deploy] + +# the compiler set to use for system libraries and MPAS builds +compiler = clang + +# the system MPI library to use for gnu compiler +mpi_gnu = mpich diff --git a/conda/bootstrap.py b/conda/bootstrap.py index 6c98f069a2..84a7d742d1 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -3,7 +3,6 @@ from __future__ import print_function import os -import re import platform import subprocess import glob @@ -86,7 +85,7 @@ def get_env_setup(args, config, machine, env_type, source_path, conda_base, mpi = config.get('deploy', 'mpi_{}'.format(compiler)) if machine is None: - conda_mpi = None + conda_mpi = 'nompi' activ_suffix = '' env_suffix = '' elif not machine.startswith('conda'): @@ -164,7 +163,8 @@ def build_env(env_type, recreate, machine, compiler, mpi, conda_mpi, version, if env_type == 'test_release': # for a test release, we will be the compass package from the dev label channels = channels + ['-c e3sm/label/compass_dev'] - if machine is None or env_type == 'release': + if (machine is not None and machine.startswith('conda')) \ + or env_type == 'release': # we need libpnetcdf and scorpio (and maybe compass itself) from the # e3sm channel, compass label channels = channels + ['-c e3sm/label/compass'] @@ -249,7 +249,7 @@ def get_env_vars(machine, compiler, mpilib): f'export I_MPI_F77=ifort\n' \ f'export I_MPI_F90=ifort\n' - if platform.system() == 'Linux' and machine == 'None': + if platform.system() == 'Linux' and machine.startswith('conda'): env_vars = \ f'{env_vars}' \ f'export MPAS_EXTERNAL_LIBS="${{MPAS_EXTERNAL_LIBS}} -lgomp"\n' @@ -636,6 +636,7 @@ def check_albany_supported(machine, compiler, mpi, source_path): raise ValueError(f'{compiler} with {mpi} is not supported with Albany on ' f'{machine}') + def main(): args = parse_args(bootstrap=True) @@ -652,6 +653,8 @@ 
def main(): else: machine = args.machine + e3sm_machine = machine is not None + if machine is None and not args.env_only: if platform.system() == 'Linux': machine = 'conda-linux' @@ -665,7 +668,6 @@ def main(): raise ValueError(f'Unexpected env_type: {env_type}') shared = (env_type != 'dev') conda_base = get_conda_base(args.conda_base, config, shared=shared) - spack_base = get_spack_base(args.spack_base, config) base_activation_script = os.path.abspath( '{}/etc/profile.d/conda.sh'.format(conda_base)) @@ -676,6 +678,13 @@ def main(): activ_path = get_env_setup(args, config, machine, env_type, source_path, conda_base, args.with_albany) + if args.spack_base is not None: + spack_base = args.spack_base + elif e3sm_machine and compiler is not None: + spack_base = get_spack_base(args.spack_base, config) + else: + spack_base = None + env_path, env_name, activate_env, spack_env = build_env( env_type, recreate, machine, compiler, mpi, conda_mpi, version, python, source_path, conda_template_path, conda_base, activ_suffix, @@ -688,10 +697,14 @@ def main(): spack_script = '' if compiler is not None: env_vars = get_env_vars(machine, compiler, mpi) - spack_branch_base, spack_script, env_vars = build_spack_env( - config, args.update_spack, machine, compiler, mpi, env_name, - activate_env, spack_env, spack_base, spack_template_path, env_vars, - args.tmpdir) + if spack_base is not None: + spack_branch_base, spack_script, env_vars = build_spack_env( + config, args.update_spack, machine, compiler, mpi, env_name, + activate_env, spack_env, spack_base, spack_template_path, + env_vars, args.tmpdir) + else: + env_vars = f'{env_vars}' \ + f'export PIO={env_path}\n' else: env_vars = '' From 66d4be10f8db75729527446fa9f8cb0d7a6aad0d Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 7 Apr 2022 15:41:48 +0200 Subject: [PATCH 23/27] Remove Cori-KNL from supported machines --- compass/machines/cori-knl.cfg | 40 ----------- conda/unsupported.txt | 3 - docs/design_docs/compass_package.rst 
| 1 - docs/developers_guide/machines/cori.rst | 18 ----- docs/developers_guide/machines/index.rst | 2 - docs/users_guide/machines/cori.rst | 89 ------------------------ docs/users_guide/quick_start.rst | 6 -- 7 files changed, 159 deletions(-) delete mode 100644 compass/machines/cori-knl.cfg diff --git a/compass/machines/cori-knl.cfg b/compass/machines/cori-knl.cfg deleted file mode 100644 index 7f98f6eca6..0000000000 --- a/compass/machines/cori-knl.cfg +++ /dev/null @@ -1,40 +0,0 @@ - -# The paths section describes paths that are used within the ocean core test -# cases. -[paths] - -# The root to a location where the mesh_database, initial_condition_database, -# and bathymetry_database for MPAS-Ocean will be cached -ocean_database_root = /global/cfs/cdirs/e3sm/mpas_standalonedata/mpas-ocean - -# The root to a location where the mesh_database and initial_condition_database -# for MALI will be cached -landice_database_root = /global/cfs/cdirs/e3sm/mpas_standalonedata/mpas-albany-landice - -# the path to the base conda environment where compass environments have -# been created -compass_envs = /global/cfs/cdirs/e3sm/software/compass/cori-knl/base - - -# Options related to deploying a compass conda environment on supported -# machines -[deploy] - -# the compiler set to use for system libraries and MPAS builds -compiler = intel - -# the system MPI library to use for intel compiler -mpi_intel = impi - -# the system MPI library to use for gnu compiler -mpi_gnu = mpt - -# the base path for spack environments used by compass -spack = /global/cfs/cdirs/e3sm/software/compass/cori-knl/spack - -# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and -# pnetcdf as E3SM (spack modules are used otherwise) -use_e3sm_hdf5_netcdf = True - -# the version of ESMF to build if using system compilers and MPI (don't build) -esmf = None diff --git a/conda/unsupported.txt b/conda/unsupported.txt index 31533b5da4..a55be55ba4 100644 --- a/conda/unsupported.txt +++ 
b/conda/unsupported.txt @@ -3,14 +3,11 @@ # don't compile grizzly, gnu, impi badger, gnu, impi -cori-knl, gnu, impi -cori-knl, intel, mpt anvil, gnu, impi # compile but don't run successfully grizzly, gnu, openmpi grizzly, intel, openmpi -cori-knl, gnu, mpt # compile but hang badger, intel, openmpi diff --git a/docs/design_docs/compass_package.rst b/docs/design_docs/compass_package.rst index 617eb364c5..820fe1d925 100644 --- a/docs/design_docs/compass_package.rst +++ b/docs/design_docs/compass_package.rst @@ -2211,7 +2211,6 @@ The available machines are listed with: default cori-haswell chrysalis - cori-knl compy grizzly diff --git a/docs/developers_guide/machines/cori.rst b/docs/developers_guide/machines/cori.rst index f9fca77718..2e8cb351f8 100644 --- a/docs/developers_guide/machines/cori.rst +++ b/docs/developers_guide/machines/cori.rst @@ -32,21 +32,3 @@ Then, you can build the MPAS model with .. code-block:: bash make gnu-nersc - -cori-knl, intel ---------------- - -This is the default ``compass`` compiler on Cori-KNL. If the environment has -been set up properly (see :ref:`dev_conda_env`), you should be able to source: - -.. code-block:: bash - - source load_dev_compass_1.0.0_cori-knl_intel_mpt.sh - -Then, you can build the MPAS model with - - -.. code-block:: bash - - make intel-mpi - diff --git a/docs/developers_guide/machines/index.rst b/docs/developers_guide/machines/index.rst index f19d90a460..99756679ca 100644 --- a/docs/developers_guide/machines/index.rst +++ b/docs/developers_guide/machines/index.rst @@ -63,8 +63,6 @@ the MPAS model. 
| +------------+-----------+-------------------+ | | gnu | mpt | gnu-nersc | +--------------+------------+-----------+-------------------+ -| cori-knl | intel | impi | intel-mpi | -+--------------+------------+-----------+-------------------+ | grizzly | intel | impi | intel-mpi | | +------------+-----------+-------------------+ | | gnu | mvapich | gfortran | diff --git a/docs/users_guide/machines/cori.rst b/docs/users_guide/machines/cori.rst index 61da4c0c18..62045414dc 100644 --- a/docs/users_guide/machines/cori.rst +++ b/docs/users_guide/machines/cori.rst @@ -150,95 +150,6 @@ To build the MPAS model with make gnu-nersc -Cori-KNL --------- - -config options -~~~~~~~~~~~~~~ - -And here are the same for ``-m cori-knl``: - -.. code-block:: cfg - - # The paths section describes paths that are used within the ocean core test - # cases. - [paths] - - # The root to a location where the mesh_database, initial_condition_database, - # and bathymetry_database for MPAS-Ocean will be cached - ocean_database_root = /global/cfs/cdirs/e3sm/mpas_standalonedata/mpas-ocean - - # The root to a location where the mesh_database and initial_condition_database - # for MALI will be cached - landice_database_root = /global/cfs/cdirs/e3sm/mpas_standalonedata/mpas-albany-landice - - # the path to the base conda environment where compass environments have - # been created - compass_envs = /global/cfs/cdirs/e3sm/software/compass/cori-knl/base - - - # Options related to deploying a compass conda environment on supported - # machines - [deploy] - - # the compiler set to use for system libraries and MPAS builds - compiler = intel - - # the system MPI library to use for intel compiler - mpi_intel = impi - - # the system MPI library to use for gnu compiler - mpi_gnu = mpt - - # the base path to system libraries to be added as part of setting up compass - system_libs = /global/cfs/cdirs/e3sm/software/compass/cori-knl/system - - # the version of ESMF to build if using system compilers and MPI (don't 
build) - esmf = None - -Additionally, some relevant config options come from the -`mache `_ package: - -.. code-block:: cfg - - # The parallel section describes options related to running jobs in parallel - [parallel] - - # parallel system of execution: slurm, cobalt or single_node - system = slurm - - # whether to use mpirun or srun to run a task - parallel_executable = srun - - # cores per node on the machine - cores_per_node = 68 - - # account for running diagnostics jobs - account = e3sm - - # available configurations(s) (default is the first) - configurations = knl - - # quality of service (default is the first) - qos = regular, premium, debug - - -Intel on Cori-KNL -~~~~~~~~~~~~~~~~~ - -To load the compass environment and modules, and set appropriate environment -variables: - -.. code-block:: bash - - source /global/cfs/cdirs/e3sm/software/compass/cori-knl/load_latest_compass_intel_impi.sh - -To build the MPAS model with - -.. code-block:: bash - - make intel-nersc - Jupyter notebook on remote data ------------------------------- diff --git a/docs/users_guide/quick_start.rst b/docs/users_guide/quick_start.rst index e1218a7a89..9ac0ba67e3 100644 --- a/docs/users_guide/quick_start.rst +++ b/docs/users_guide/quick_start.rst @@ -50,12 +50,6 @@ Grizzly). Here are the commands to load the the environment for the latest source /global/cfs/cdirs/e3sm/software/compass/cori-haswell/load_latest_compass.sh -* Cori-KNL: - -.. code-block:: bash - - source /global/cfs/cdirs/e3sm/software/compass/cori-knl/load_latest_compass.sh - * Grizzly: .. 
code-block:: bash From ec8cf01120f31ca5371bcef9b42640ed752cab63 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 7 Apr 2022 17:40:11 +0200 Subject: [PATCH 24/27] Remove Grizzly from supported machines --- compass/machines/grizzly.cfg | 37 ----- conda/unsupported.txt | 5 - docs/design_docs/compass_package.rst | 1 - docs/developers_guide/machines/badger.rst | 2 +- docs/developers_guide/machines/grizzly.rst | 52 ------- docs/developers_guide/machines/index.rst | 5 - docs/developers_guide/quick_start.rst | 2 +- docs/users_guide/machines/badger.rst | 68 +++++++++- docs/users_guide/machines/grizzly.rst | 150 --------------------- docs/users_guide/machines/index.rst | 1 - docs/users_guide/quick_start.rst | 10 +- 11 files changed, 70 insertions(+), 263 deletions(-) delete mode 100644 compass/machines/grizzly.cfg delete mode 100644 docs/developers_guide/machines/grizzly.rst delete mode 100644 docs/users_guide/machines/grizzly.rst diff --git a/compass/machines/grizzly.cfg b/compass/machines/grizzly.cfg deleted file mode 100644 index f7f36ccbed..0000000000 --- a/compass/machines/grizzly.cfg +++ /dev/null @@ -1,37 +0,0 @@ - -# The paths section describes paths that are used within the ocean core test -# cases. 
-[paths] - -# The root to a location where the mesh_database, initial_condition_database, -# and bathymetry_database for MPAS-Ocean will be cached -ocean_database_root = /usr/projects/regionalclimate/COMMON_MPAS/ocean/grids/ - -# The root to a location where the mesh_database and initial_condition_database -# for MALI will be cached -landice_database_root = /usr/projects/regionalclimate/COMMON_MPAS/mpas_standalonedata/mpas-albany-landice - -# the path to the base conda environment where compass environments have -# been created -compass_envs = /usr/projects/climate/SHARED_CLIMATE/compass/grizzly/base - - -# Options related to deploying a compass conda environment on supported -# machines -[deploy] - -# the compiler set to use for system libraries and MPAS builds -compiler = intel - -# the system MPI library to use for intel compiler -mpi_intel = impi - -# the system MPI library to use for gnu compiler -mpi_gnu = mvapich - -# the base path for spack environments used by compass -spack = /usr/projects/climate/SHARED_CLIMATE/compass/grizzly/spack - -# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and -# pnetcdf as E3SM (spack modules are used otherwise) -use_e3sm_hdf5_netcdf = True diff --git a/conda/unsupported.txt b/conda/unsupported.txt index a55be55ba4..47b43cefee 100644 --- a/conda/unsupported.txt +++ b/conda/unsupported.txt @@ -1,14 +1,9 @@ # a list of unsupported machine, compiler and mpi combinations # don't compile -grizzly, gnu, impi badger, gnu, impi anvil, gnu, impi -# compile but don't run successfully -grizzly, gnu, openmpi -grizzly, intel, openmpi - # compile but hang badger, intel, openmpi diff --git a/docs/design_docs/compass_package.rst b/docs/design_docs/compass_package.rst index 820fe1d925..a811ba7268 100644 --- a/docs/design_docs/compass_package.rst +++ b/docs/design_docs/compass_package.rst @@ -2212,7 +2212,6 @@ The available machines are listed with: cori-haswell chrysalis compy - grizzly When setting up a test case or test 
suite, the ``--machine`` or ``-m`` flag is used to specify the machine. diff --git a/docs/developers_guide/machines/badger.rst b/docs/developers_guide/machines/badger.rst index 35ce29a9ec..837c7bd2e9 100644 --- a/docs/developers_guide/machines/badger.rst +++ b/docs/developers_guide/machines/badger.rst @@ -6,7 +6,7 @@ Badger .. note:: It is important that you not point to custom module files for the compiler, - MPI, and netcdf modules on Grizzly to work properly. If you have: + MPI, and netcdf modules on Badger to work properly. If you have: .. code-block:: bash diff --git a/docs/developers_guide/machines/grizzly.rst b/docs/developers_guide/machines/grizzly.rst deleted file mode 100644 index 01af4231c5..0000000000 --- a/docs/developers_guide/machines/grizzly.rst +++ /dev/null @@ -1,52 +0,0 @@ -.. _dev_machine_grizzly: - -Grizzly -======= - -.. note:: - - It is important that you not point to custom module files for the compiler, - MPI, and netcdf modules on Grizzly to work properly. If you have: - - .. code-block:: bash - - module use /usr/projects/climate/SHARED_CLIMATE/modulefiles/all - - or similar commands in your ``.bashrc``, please either comment them out or - make sure to run - - .. code-block:: bash - - module unuse /usr/projects/climate/SHARED_CLIMATE/modulefiles/all - - -intel ------ - -This is the default ``compass`` compiler on Grizzly. If the environment has -been set up properly (see :ref:`dev_conda_env`), you should be able to source: - -.. code-block:: bash - - source load_dev_compass_1.0.0_brizzly_intel_impi.sh - -Then, you can build the MPAS model with - -.. code-block:: bash - - make intel-mpi - -gnu ---- - -If you've set things up for this compiler, you should be able to: - -.. code-block:: bash - - source load_dev_compass_1.0.0_grizzly_gnu_mvapich.sh - -Then, you can build the MPAS model with - -.. 
code-block:: bash - - make gfortran diff --git a/docs/developers_guide/machines/index.rst b/docs/developers_guide/machines/index.rst index 99756679ca..e1d9db3aab 100644 --- a/docs/developers_guide/machines/index.rst +++ b/docs/developers_guide/machines/index.rst @@ -63,10 +63,6 @@ the MPAS model. | +------------+-----------+-------------------+ | | gnu | mpt | gnu-nersc | +--------------+------------+-----------+-------------------+ -| grizzly | intel | impi | intel-mpi | -| +------------+-----------+-------------------+ -| | gnu | mvapich | gfortran | -+--------------+------------+-----------+-------------------+ Below are specifics for each supported machine @@ -78,7 +74,6 @@ Below are specifics for each supported machine chrysalis compy cori - grizzly .. _dev_other_machines: diff --git a/docs/developers_guide/quick_start.rst b/docs/developers_guide/quick_start.rst index 5f7663a3b6..bfadf8e600 100644 --- a/docs/developers_guide/quick_start.rst +++ b/docs/developers_guide/quick_start.rst @@ -332,7 +332,7 @@ MPAS-Ocean, i.e.: make Compiling MALI with Albany has not yet been standardized. Some information is -available at +available at `https://github.com/MALI-Dev/E3SM/wiki `_, and complete instructions will be added here in the future. diff --git a/docs/users_guide/machines/badger.rst b/docs/users_guide/machines/badger.rst index f1e70b3b0b..0336d32bde 100644 --- a/docs/users_guide/machines/badger.rst +++ b/docs/users_guide/machines/badger.rst @@ -3,9 +3,73 @@ Badger ====== -For details on LANL IC, see :ref:`machine_grizzly`. 
+`LANL IC overview and search `_ -login: ``ssh -t $my_username@wtrw.lanl.gov ssh ba-fe`` +`DST Calendar `_ (within LANL yellow) + +Information about Slurm: + +* `Introduction to Slurm at LANL `_ + +* `Basic Slurm Guide for LANL HPC Users `_ + +* `Slurm Command Summary `_ + +* `Slurm: Running Jobs on HPC Platforms `_ + +* `example of batch scripts `_ + +Machine specifications: `badger `_ +`turquoise network `_ + +login: ``ssh -t $my_moniker@wtrw.lanl.gov ssh ba-fe`` + +File locations: + +* small home directory, for start-up scripts only: ``/users/$my_moniker`` + +* home directory, backed up: ``/usr/projects/climate/$my_moniker`` + +* scratch space, not backed up: ``/lustre/scratch3/turquoise/$my_moniker`` or + ``scratch4`` + +Check compute time: + +* ``sacctmgr list assoc user=$my_moniker format=Cluster,Account%18,Partition,QOS%45`` + +* Which is my default account? sacctmgr list user $my_moniker + +* ``sshare -a | head -2; sshare -a | grep $ACCOUNT | head -1`` + +* ``sreport -t Hours cluster AccountUtilizationByUser start=2019-12-02 | grep $ACCOUNT`` + +* check job priority: ``sshare -a | head -2; sshare -a | grep $ACCOUNT`` + +* `LANL Cluster Usage Overview `_ (within LANL yellow) + +Check disk usage: + +* your home space: ``chkhome`` + +* total disk usage in Petabytes: ``df -BP |head -n 1; df -BP|grep climate; df -BP |grep scratch`` + +Archiving + +* `turquoise HPSS archive `_ + +* archive front end: ``ssh -t $my_moniker@wtrw.lanl.gov ssh ar-tn`` + +* storage available at: ``cd /archive/`` + +* you can just copy files directly into here for a particular project. + +LANL uses slurm. To obtain an interactive node: + +.. code-block:: bash + + salloc -N 1 -t 2:0:0 --qos=interactive + +Use ``--account=ACCOUNT_NAME`` to change to a particular account. 
config options diff --git a/docs/users_guide/machines/grizzly.rst b/docs/users_guide/machines/grizzly.rst deleted file mode 100644 index 180d377e92..0000000000 --- a/docs/users_guide/machines/grizzly.rst +++ /dev/null @@ -1,150 +0,0 @@ -.. _machine_grizzly: - -Grizzly -======= - -`LANL IC overview and search `_ - -`DST Calendar `_ (within LANL yellow) - -Information about Slurm: - -* `Introduction to Slurm at LANL `_ - -* `Basic Slurm Guide for LANL HPC Users `_ - -* `Slurm Command Summary `_ - -* `Slurm: Running Jobs on HPC Platforms `_ - -* `example of batch scripts `_ - -Machine specifications: `grizzly `_ -`badger `_ -`turquoise network `_ - -login: ``ssh -t $my_moniker@wtrw.lanl.gov ssh gr-fe`` - -File locations: - -* small home directory, for start-up scripts only: ``/users/$my_moniker`` - -* home directory, backed up: ``/usr/projects/climate/$my_moniker`` - -* scratch space, not backed up: ``/lustre/scratch3/turquoise/$my_moniker`` or - ``scratch4`` - -Check compute time: - -* ``sacctmgr list assoc user=$my_moniker format=Cluster,Account%18,Partition,QOS%45`` - -* Which is my default account? sacctmgr list user $my_moniker - -* ``sshare -a | head -2; sshare -a | grep $ACCOUNT | head -1`` - -* ``sreport -t Hours cluster AccountUtilizationByUser start=2019-12-02 | grep $ACCOUNT`` - -* check job priority: ``sshare -a | head -2; sshare -a | grep $ACCOUNT`` - -* `LANL Cluster Usage Overview `_ (within LANL yellow) - -Check disk usage: - -* your home space: ``chkhome`` - -* total disk usage in Petabytes: ``df -BP |head -n 1; df -BP|grep climate; df -BP |grep scratch`` - -Archiving - -* `turquoise HPSS archive `_ - -* archive front end: ``ssh -t $my_moniker@wtrw.lanl.gov ssh ar-tn`` - -* storage available at: ``cd /archive/`` - -* you can just copy files directly into here for a particular project. - -LANL uses slurm. To obtain an interactive node: - -.. 
code-block:: bash - - salloc -N 1 -t 2:0:0 --qos=interactive - -Use ``--account=ACCOUNT_NAME`` to change to a particular account. - - -config options --------------- - -Here are the default config options added when you choose ``-m grizzly`` when -setting up test cases or a test suite: - -.. code-block:: cfg - - # The paths section describes paths that are used within the ocean core test - # cases. - [paths] - - # The root to a location where the mesh_database, initial_condition_database, - # and bathymetry_database for MPAS-Ocean will be cached - ocean_database_root = /usr/projects/regionalclimate/COMMON_MPAS/ocean/grids/ - - # The root to a location where the mesh_database and initial_condition_database - # for MALI will be cached - landice_database_root = /usr/projects/regionalclimate/COMMON_MPAS/mpas_standalonedata/mpas-albany-landice - - # the path to the base conda environment where compass environments have - # been created - compass_envs = /usr/projects/climate/SHARED_CLIMATE/anaconda_envs/base - - - # The parallel section describes options related to running tests in parallel - [parallel] - - # parallel system of execution: slurm or single_node - system = slurm - - # whether to use mpirun or srun to run the model - parallel_executable = srun - - # cores per node on the machine - cores_per_node = 36 - - # the slurm account - account = e3sm - - # the number of multiprocessing or dask threads to use - threads = 18 - - -grizzly, gnu ------------- - -To load the compass environment and modules, and set appropriate environment -variables: - -.. code-block:: bash - - source /usr/projects/climate/SHARED_CLIMATE/compass/badger/load_latest_compass_gnu_mvapich.sh - -To build the MPAS model with - -.. code-block:: bash - - make gfortran - -grizzly, intel --------------- - -To load the compass environment and modules, and set appropriate environment -variables: - -.. 
code-block:: bash - - source /usr/projects/climate/SHARED_CLIMATE/compass/badger/load_latest_compass_intel_impi.sh - -To build the MPAS model with - -.. code-block:: bash - - make intel-mpi diff --git a/docs/users_guide/machines/index.rst b/docs/users_guide/machines/index.rst index 6bea3235e2..8925f8da2b 100644 --- a/docs/users_guide/machines/index.rst +++ b/docs/users_guide/machines/index.rst @@ -139,7 +139,6 @@ to use the default MPI variant for each compiler on each machine. chrysalis compy cori - grizzly .. _other_machines: diff --git a/docs/users_guide/quick_start.rst b/docs/users_guide/quick_start.rst index 9ac0ba67e3..6d84d4615c 100644 --- a/docs/users_guide/quick_start.rst +++ b/docs/users_guide/quick_start.rst @@ -16,8 +16,8 @@ For each ``compass`` release, we maintain a ``compass`` package as well as all of its dependencies and some libraries (currently `ESMF `_ and `SCORPIO `_) built with system -MPI on our standard machines (Anvil, Badger, Chrysalis, Compy, Cori, and -Grizzly). Here are the commands to load the the environment for the latest +MPI on our standard machines (Anvil, Badger, Chrysalis, Compy, and Cori). +Here are the commands to load the the environment for the latest ``compass`` release with the default compiler and MPI library on each machine: * Anvil (Blues): @@ -50,12 +50,6 @@ Grizzly). Here are the commands to load the the environment for the latest source /global/cfs/cdirs/e3sm/software/compass/cori-haswell/load_latest_compass.sh -* Grizzly: - -.. code-block:: bash - - source /usr/projects/climate/SHARED_CLIMATE/compass/grizzly/load_latest_compass.sh - These same paths (minus ``load_latest_compass.sh``) also have load scripts for the latest version of compass with all the supported compiler and MPI combinations. 
For example, on Anvil, you can get an environment appropriate From cc0ff60c96ed755bd42b7cffcdf9db4acec916fc Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 7 Apr 2022 20:34:54 +0200 Subject: [PATCH 25/27] Update docs to mention Spack and Albany support --- docs/developers_guide/machines/anvil.rst | 10 +++- docs/developers_guide/machines/badger.rst | 4 +- docs/developers_guide/machines/chrysalis.rst | 4 +- docs/developers_guide/machines/compy.rst | 2 +- docs/developers_guide/machines/cori.rst | 4 +- docs/developers_guide/machines/index.rst | 6 +++ docs/developers_guide/quick_start.rst | 54 ++++++++++++++------ docs/users_guide/machines/anvil.rst | 18 +++++-- docs/users_guide/machines/badger.rst | 12 +++-- docs/users_guide/machines/chrysalis.rst | 12 +++-- docs/users_guide/machines/compy.rst | 12 +++-- docs/users_guide/machines/cori.rst | 16 +++--- docs/users_guide/machines/index.rst | 8 ++- docs/users_guide/quick_start.rst | 20 ++++---- 14 files changed, 125 insertions(+), 57 deletions(-) diff --git a/docs/developers_guide/machines/anvil.rst b/docs/developers_guide/machines/anvil.rst index 29a5eaddaf..ccde79d923 100644 --- a/docs/developers_guide/machines/anvil.rst +++ b/docs/developers_guide/machines/anvil.rst @@ -17,7 +17,13 @@ Then, you can build the MPAS model with .. code-block:: bash - make intel-mpi + make [DEBUG=true] [OPENMP=true] intel-mpi + +For other MPI libraries (``openmpi`` or ``mvapich`` instead of ``impi``), use + +.. code-block:: bash + + make [DEBUG=true] [OPENMP=true] ifort gnu --- @@ -32,4 +38,4 @@ Then, you can build the MPAS model with .. code-block:: bash - make gfortran + make [DEBUG=true] [OPENMP=true] [ALBANY=true] gfortran diff --git a/docs/developers_guide/machines/badger.rst b/docs/developers_guide/machines/badger.rst index 837c7bd2e9..86e7b96647 100644 --- a/docs/developers_guide/machines/badger.rst +++ b/docs/developers_guide/machines/badger.rst @@ -33,7 +33,7 @@ Then, you can build the MPAS model with .. 
code-block:: bash - make intel-mpi + make [DEBUG=true] [OPENMP=true] intel-mpi gnu --- @@ -48,4 +48,4 @@ Then, you can build the MPAS model with .. code-block:: bash - make gfortran + make [DEBUG=true] [OPENMP=true] [ALBANY=true] gfortran diff --git a/docs/developers_guide/machines/chrysalis.rst b/docs/developers_guide/machines/chrysalis.rst index 94bc492ff1..af5f7b9437 100644 --- a/docs/developers_guide/machines/chrysalis.rst +++ b/docs/developers_guide/machines/chrysalis.rst @@ -17,7 +17,7 @@ Then, you can build the MPAS model with .. code-block:: bash - make intel-mpi + make [DEBUG=true] [OPENMP=true] intel-mpi gnu --- @@ -32,4 +32,4 @@ Then, you can build the MPAS model with .. code-block:: bash - make gfortran + make [DEBUG=true] [OPENMP=true] [ALBANY=true] gfortran diff --git a/docs/developers_guide/machines/compy.rst b/docs/developers_guide/machines/compy.rst index 07e22c885f..58f4312332 100644 --- a/docs/developers_guide/machines/compy.rst +++ b/docs/developers_guide/machines/compy.rst @@ -21,5 +21,5 @@ Then, you can build the MPAS model with .. code-block:: bash - make intel-mpi + make [DEBUG=true] [OPENMP=true] intel-mpi diff --git a/docs/developers_guide/machines/cori.rst b/docs/developers_guide/machines/cori.rst index 2e8cb351f8..62347f79a7 100644 --- a/docs/developers_guide/machines/cori.rst +++ b/docs/developers_guide/machines/cori.rst @@ -16,7 +16,7 @@ Then, you can build the MPAS model with .. code-block:: bash - make intel-nersc + make [DEBUG=true] [OPENMP=true] intel-nersc cori-haswell, gnu ----------------- @@ -31,4 +31,4 @@ Then, you can build the MPAS model with .. code-block:: bash - make gnu-nersc + make [DEBUG=true] [OPENMP=true] [ALBANY=true] gnu-nersc diff --git a/docs/developers_guide/machines/index.rst b/docs/developers_guide/machines/index.rst index e1d9db3aab..c4de1bd483 100644 --- a/docs/developers_guide/machines/index.rst +++ b/docs/developers_guide/machines/index.rst @@ -34,6 +34,12 @@ you were setting things up. 
You can can source this file on a compute node MPI libraries and environment variables for running ``compass`` tests and the MPAS model. +.. note:: + + Albany (and therefore most of the functionality in MALI) is currently only + supported for those configurations with ``gnu`` compilers. + + +--------------+------------+-----------+-------------------+ | Machine | Compiler | MPI lib. | MPAS make target | +==============+============+===========+===================+ diff --git a/docs/developers_guide/quick_start.rst b/docs/developers_guide/quick_start.rst index bfadf8e600..76f7bfb881 100644 --- a/docs/developers_guide/quick_start.rst +++ b/docs/developers_guide/quick_start.rst @@ -82,7 +82,7 @@ If you are on one of the :ref:`dev_supported_machines`, run: .. code-block:: bash ./conda/configure_compass_env.py --conda \ - -c + -c [--mpi ] [-m ] [--with_albany] The ```` is typically ``~/miniconda3``. This is the location where you would like to install Miniconda3 or where it is @@ -94,14 +94,24 @@ See the machine under :ref:`dev_supported_machines` for a list of available compilers to pass to ``-c``. If you don't supply a compiler, you will get the default one for that machine (usually Intel). Typically, you will want the default MPI flavor that compass has defined for each compiler, so you should -not need to specify which MPI version to use but you may do so with ``-i`` if -you need to. +not need to specify which MPI version to use but you may do so with ``--mpi`` +if you need to. If you are on a login node, the script should automatically recognize what machine you are on. You can supply the machine name with ``-m `` if you run into trouble with the automatic recognition (e.g. if you're setting up the environment on a compute node, which is not recommended). +If you are working with MALI, you should specify ``--with_albany``. 
This will +ensure that the Albany and Trilinos libraries are included among those built +with system compilers and MPI libraries, a requirement for many MALI test +cases. Currently, Albany is only supported with ``gnu`` compilers. + +It is safe to add the ``--with_albany`` flag for MPAS-Ocean but it is not +recommended unless a user wants to be able to run both models with the same +conda/spack environment. The main downside is simply that unneeded libraries +will be linked into MPAS-Ocean. + Unknown machines ~~~~~~~~~~~~~~~~ @@ -128,6 +138,15 @@ up the compass test environment. A config file can be specified using file. More information, including example config files, can be found in :ref:`config_files`. +.. note:: + + Currently, there is not a good way to build Albany for an unknown machine as + part of the compass deployment process, meaning MALI will be limited to the + shallow-ice approximation (SIA) solver. + + To get started on HPC systems that aren't supported by Compass, get in touch + with the developers. + What the script does ~~~~~~~~~~~~~~~~~~~~ @@ -138,20 +157,21 @@ this script will also: so changes you make to the repo are immediately reflected in the conda environment. -* build the `SCORPIO `_ library if it - hasn't already been built. SCORPIO is needed building and running MPAS - components. +* use Spack to build several libraries with system compilers and MPI library, + including: `SCORPIO `_ (parallel + i/o for MPAS components) and `ESMF `_ + (making mapping files in parallel). -* build the `ESMF `_ library if it hasn't - already been built. ESMF with the system's version of MPI is needed for - making mapping files. +* optionally (with the ``--with_albany`` flag) use Spack to install + `Trilinos `_ and + `Albany `_ libraries.
-* make an activation script called - ``load_dev_compass____.sh``, - where ```` is the compass version, ```` is the name of the - machine (to prevent confusion when running from the same branch on multiple - machines), ```` is the compiler name (e.g. ``intel`` or ``gnu``), - and ``mpi`` is the MPI flavor (e.g. ``impi``, ``mvapich``, ``openmpi``). +* make an activation script called ``load_*.sh``, where the details of the + name encode the conda environment name, the machine, compilers and MPI + libraries, e.g. + ``load_dev_compass____.sh`` (```` + is the compass version, ```` is the name of the + machine, ```` is the compiler name, and ``mpi`` is the MPI flavor). * optionally (with the ``--check`` flag), run some tests to make sure some of the expected packages are available. @@ -170,6 +190,10 @@ Optional flags to something other than the default (``dev_compass_`` or ``dev_compass__``). +``--with_albany`` + Install Albany for full MALI support (currently only with ``gnu`` + compilers) + Activating the environment ~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/users_guide/machines/anvil.rst b/docs/users_guide/machines/anvil.rst index 92a9e9c252..ec8b6a7acf 100644 --- a/docs/users_guide/machines/anvil.rst +++ b/docs/users_guide/machines/anvil.rst @@ -46,8 +46,12 @@ or when you choose ``-m anvil`` when setting up test cases or a test suite: # the system MPI library to use for gnu compiler mpi_gnu = mvapich - # the base path to system libraries to be added as part of setting up compass - system_libs = /lcrc/soft/climate/compass/anvil/system + # the base path for spack environments used by compass + spack = /lcrc/soft/climate/compass/anvil/spack + + # whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and + # pnetcdf as E3SM (spack modules are used otherwise) + use_e3sm_hdf5_netcdf = True Additionally, some relevant config options come from the `mache `_ package: @@ -90,7 +94,13 @@ To build the MPAS model with ..
code-block:: bash - make intel-mpi + make [DEBUG=true] [OPENMP=true] intel-mpi + +For other MPI libraries (``openmpi`` or ``mvapich`` instead of ``impi``), use + +.. code-block:: bash + + make [DEBUG=true] [OPENMP=true] ifort Gnu on Anvil ------------ @@ -106,4 +116,4 @@ To build the MPAS model with .. code-block:: bash - make gfortran + make [DEBUG=true] [OPENMP=true] [ALBANY=true] gfortran diff --git a/docs/users_guide/machines/badger.rst b/docs/users_guide/machines/badger.rst index 0336d32bde..c0cf456108 100644 --- a/docs/users_guide/machines/badger.rst +++ b/docs/users_guide/machines/badger.rst @@ -111,8 +111,12 @@ suite: # the system MPI library to use for gnu compiler mpi_gnu = mvapich - # the base path to system libraries to be added as part of setting up compass - system_libs = /usr/projects/climate/SHARED_CLIMATE/compass/badger/system + # the base path for spack environments used by compass + spack = /usr/projects/climate/SHARED_CLIMATE/compass/badger/spack + + # whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and + # pnetcdf as E3SM (spack modules are used otherwise) + use_e3sm_hdf5_netcdf = False Additionally, some relevant config options come from the `mache `_ package: @@ -152,7 +156,7 @@ To build the MPAS model with .. code-block:: bash - make intel-mpi + make [DEBUG=true] [OPENMP=true] intel-mpi Gnu on Badger ------------- @@ -169,4 +173,4 @@ To build the MPAS model with .. 
code-block:: bash - make gfortran + make [DEBUG=true] [OPENMP=true] [ALBANY=true] gfortran diff --git a/docs/users_guide/machines/chrysalis.rst b/docs/users_guide/machines/chrysalis.rst index 6c108d5065..3f2e7b076f 100644 --- a/docs/users_guide/machines/chrysalis.rst +++ b/docs/users_guide/machines/chrysalis.rst @@ -42,8 +42,12 @@ test suite: # the system MPI library to use for gnu compiler mpi_gnu = openmpi - # the base path to system libraries to be added as part of setting up compass - system_libs = /lcrc/soft/climate/compass/chrysalis/system + # the base path for spack environments used by compass + spack = /lcrc/soft/climate/compass/chrysalis/spack + + # whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and + # pnetcdf as E3SM (spack modules are used otherwise) + use_e3sm_hdf5_netcdf = True Additionally, some relevant config options come from the `mache `_ package: @@ -81,7 +85,7 @@ To build the MPAS model with .. code-block:: bash - make intel-mpi + make [DEBUG=true] [OPENMP=true] intel-mpi Gnu on Chrysalis @@ -98,5 +102,5 @@ To build the MPAS model with .. 
code-block:: bash - make gfortran + make [DEBUG=true] [OPENMP=true] [ALBANY=true] gfortran diff --git a/docs/users_guide/machines/compy.rst b/docs/users_guide/machines/compy.rst index c5ff8edbe8..3dcffcf674 100644 --- a/docs/users_guide/machines/compy.rst +++ b/docs/users_guide/machines/compy.rst @@ -42,8 +42,14 @@ suite: # the system MPI library to use for gnu compiler mpi_pgi = mvapich2 - # the base path to system libraries to be added as part of setting up compass - system_libs = /share/apps/E3SM/conda_envs/compass/system + # the base path for spack environments used by compass + spack = /share/apps/E3SM/conda_envs/compass/spack + + # whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and + # pnetcdf as E3SM (spack modules are used otherwise) + # + # We don't use them on Compy because hdf5 and netcdf were build without MPI + use_e3sm_hdf5_netcdf = False Additionally, some relevant config options come from the `mache `_ package: @@ -86,4 +92,4 @@ To build the MPAS model with .. code-block:: bash - make intel-mpi + make [DEBUG=true] [OPENMP=true] intel-mpi diff --git a/docs/users_guide/machines/cori.rst b/docs/users_guide/machines/cori.rst index 62045414dc..bab26182d1 100644 --- a/docs/users_guide/machines/cori.rst +++ b/docs/users_guide/machines/cori.rst @@ -42,8 +42,8 @@ Archive: Cori-Haswell ------------ -Since Cori's Haswell and KNL nodes have different configuration options and -compilers, they are treated as separate supported machines in compass. +Cori's Haswell and KNL nodes have different configuration options and +compilers. We only support Cori-Haswell at this time. 
config options ~~~~~~~~~~~~~~ @@ -84,8 +84,12 @@ cases or a test suite: # the system MPI library to use for gnu compiler mpi_gnu = mpt - # the base path to system libraries to be added as part of setting up compass - system_libs = /global/cfs/cdirs/e3sm/software/compass/cori-haswell/system + # the base path for spack environments used by compass + spack = /global/cfs/cdirs/e3sm/software/compass/cori-haswell/spack + + # whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and + # pnetcdf as E3SM (spack modules are used otherwise) + use_e3sm_hdf5_netcdf = True # the version of ESMF to build if using system compilers and MPI (don't build) esmf = None @@ -131,7 +135,7 @@ To build the MPAS model with .. code-block:: bash - make intel-nersc + make [DEBUG=true] [OPENMP=true] intel-nersc Gnu on Cori-Haswell @@ -148,7 +152,7 @@ To build the MPAS model with .. code-block:: bash - make gnu-nersc + make [DEBUG=true] [OPENMP=true] [ALBANY=true] gnu-nersc Jupyter notebook on remote data diff --git a/docs/users_guide/machines/index.rst b/docs/users_guide/machines/index.rst index 8925f8da2b..e0447aec92 100644 --- a/docs/users_guide/machines/index.rst +++ b/docs/users_guide/machines/index.rst @@ -48,8 +48,12 @@ The config options typically defined for a machine are: # the system MPI library to use for gnu compiler mpi_gnu = mvapich - # the base path to system libraries to be added as part of setting up compass - system_libs = /usr/projects/climate/SHARED_CLIMATE/compass/badger/system + # the base path for spack environments used by compass + spack = /usr/projects/climate/SHARED_CLIMATE/compass/badger/spack + + # whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and + # pnetcdf as E3SM (spack modules are used otherwise) + use_e3sm_hdf5_netcdf = False The ``paths`` section provides local paths to the root of the "databases" diff --git a/docs/users_guide/quick_start.rst b/docs/users_guide/quick_start.rst index 6d84d4615c..4298e9743a 100644 --- 
a/docs/users_guide/quick_start.rst +++ b/docs/users_guide/quick_start.rst @@ -139,22 +139,22 @@ To build MPAS-Ocean, first source the appropriate load script (see MALI can be compiled with or without the Albany library that contains the first-order velocity solver. The Albany first-order velocity solver is the only velocity option that is scientifically validated, but the Albany library -is only installed on Badger, Grizzly, and Cori. Therefore, in some situations -it is desirable to compile without Albany to run basic tests on platforms where -Albany is not available. This basic mode of MALI can be compiled similarly to -MPAS-Ocean. Again, first source the appropriate load script (see -:ref:`conda_env`) then run: +is only available with Gnu compilers (and therefore not at all on Compy). +Therefore, in some situations it is desirable to compile without Albany to run +basic tests on platforms where Albany is not available. This basic mode of +MALI can be compiled similarly to MPAS-Ocean. Again, first source the +appropriate load script (see :ref:`conda_env`) then run: .. code-block:: bash cd components/mpas-albany-landice git submodule update --init --recursive - make + make [ALBANY=true] -Compiling MALI with Albany has not yet been standardized, though this is a -feature we hope to support in the next release. Some information is available -at `https://github.com/MALI-Dev/E3SM/wiki `_, -and complete instructions will be added here in the future. +where ``ALBANY=true`` is included if you want to compile with Albany support +and excluded if you do not. Some more information on building and running +MALI is available at +`https://github.com/MALI-Dev/E3SM/wiki `_. See the last column of the table in :ref:`dev_supported_machines` for the right ```` command for each machine and compiler.
From 221aa9cc990e5204725f3dcfce39c89ee246d266 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 13 Apr 2022 13:40:43 +0200 Subject: [PATCH 26/27] Change help for `--mpi` flag --- conda/shared.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/conda/shared.py b/conda/shared.py index 09e913bc81..de2c6fefb8 100644 --- a/conda/shared.py +++ b/conda/shared.py @@ -29,8 +29,8 @@ def parse_args(bootstrap): parser.add_argument("-p", "--python", dest="python", type=str, help="The python version to deploy") parser.add_argument("-i", "--mpi", dest="mpi", type=str, - help="The MPI library (nompi, mpich, openmpi or a " - "system flavor) to deploy") + help="The MPI library to deploy, see the docs for " + "details") parser.add_argument("-c", "--compiler", dest="compiler", type=str, help="The name of the compiler") parser.add_argument("--env_only", dest="env_only", action='store_true', From 876251c1684b391c6691214ae0594adcc0dbad26 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 14 Apr 2022 09:55:30 +0200 Subject: [PATCH 27/27] Do not set machine name on linux/osx --- conda/bootstrap.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda/bootstrap.py b/conda/bootstrap.py index 84a7d742d1..25aafaf8c0 100755 --- a/conda/bootstrap.py +++ b/conda/bootstrap.py @@ -385,7 +385,7 @@ def write_load_compass(template_path, activ_path, conda_base, env_type, env_vars = f'{env_vars}\n' \ f'export HDF5_USE_FILE_LOCKING=FALSE\n' \ f'export LOAD_COMPASS_ENV={script_filename}' - if machine is not None: + if machine is not None and not machine.startswith('conda'): env_vars = f'{env_vars}\n' \ f'export COMPASS_MACHINE={machine}'