diff --git a/.github/workflows/container.yml b/.github/workflows/container.yml
new file mode 100644
index 00000000000..26f0fdc7cc6
--- /dev/null
+++ b/.github/workflows/container.yml
@@ -0,0 +1,58 @@
+name: container build/publish
+
+on:
+  push:
+    branches:
+      - master
+    paths:
+      - 'docker/**'
+
+  pull_request:
+    branches:
+      - master
+    paths:
+      - 'docker/**'
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  # Only build container if there has been a change.
+  build-containers:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: ghcr.io/esmci/cime
+          flavor: |
+            latest=true
+          tags: |
+            type=sha
+      - name: Build and push
+        uses: docker/build-push-action@v3
+        with:
+          target: base
+          context: docker/
+          push: ${{ github.event_name == 'push' }}
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
index 949f4c04fa6..2f0b35042f5 100644
--- a/.github/workflows/testing.yml
+++ b/.github/workflows/testing.yml
@@ -4,14 +4,20 @@ on:
   push:
     branches:
       - master
-    paths-ignore:
-      - 'doc/**'
+    paths:
+      - 'CIME/**'
+      - 'scripts/**'
+      - 'tools/**'
+      - 'utils/**'
 
   pull_request:
     branches:
       - master
-    paths-ignore:
-      - 'doc/**'
+    paths:
+      - 'CIME/**'
+      - 'scripts/**'
+      - 'tools/**'
+      - 'utils/**'
 
 concurrency:
   group: ${{ github.ref }}
@@ -43,64 +49,16 @@ jobs:
         pre-commit run -a
 
-  # Check if there has been a change to any file under docker/
-  get-docker-changes:
-    runs-on: ubuntu-latest
-    outputs:
-      any_changed: ${{ steps.get-changed-files.outputs.any_changed }}
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 2
-      - name: Get changed files
-        id: get-changed-files
-        uses: tj-actions/changed-files@v29
-        with:
-          files: docker
-
-  # Only build container if there has been a change.
-  build-containers:
-    runs-on: ubuntu-latest
-    needs: get-docker-changes
-    if: ${{ needs.get-docker-changes.outputs.any_changed == 'true' }}
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v3
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Docker meta
-        id: meta
-        uses: docker/metadata-action@v4
-        with:
-          images: jasonb87/cime
-          tags: |
-            type=raw,value=latest
-            type=sha,prefix={{ date 'YYYYMMDD' }}_,format=short
-      - name: Build and push
-        uses: docker/build-push-action@v3
-        with:
-          target: base
-          context: docker/
-          push: true
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-          cache-from: type=registry,ref=jasonb87/cime:buildcache
-          cache-to: type=registry,ref=jasonb87/cime:buildcache,mode=max
-
   # Runs unit testing under different python versions.
   unit-testing:
     runs-on: ubuntu-latest
-    needs: build-containers
     if: ${{ always() && ! cancelled() }}
-    container: jasonb87/cime:latest
+    container:
+      image: ghcr.io/esmci/cime:latest
+      credentials:
+        username: ${{ github.actor }}
+        password: ${{ secrets.GITHUB_TOKEN }}
+      options: '--pull=always'
     strategy:
       matrix:
         python-version: ['3.8', '3.9', '3.10']
@@ -134,9 +92,13 @@
   # Run system tests
   system-testing:
     runs-on: ubuntu-latest
-    needs: build-containers
     if: ${{ always() && ! cancelled() }}
-    container: jasonb87/cime:latest
+    container:
+      image: ghcr.io/esmci/cime:latest
+      credentials:
+        username: ${{ github.actor }}
+        password: ${{ secrets.GITHUB_TOKEN }}
+      options: '--pull=always'
     strategy:
       matrix:
         model: ["e3sm", "cesm"]
diff --git a/CIME/XML/env_batch.py b/CIME/XML/env_batch.py
index d251d1e474c..7682ab6e1ee 100644
--- a/CIME/XML/env_batch.py
+++ b/CIME/XML/env_batch.py
@@ -795,20 +795,10 @@ def submit_jobs(
         batch_job_id = None
         for _ in range(num_submit):
             for job, dependency in jobs:
-                if dependency is not None:
-                    deps = dependency.split()
-                else:
-                    deps = []
-                dep_jobs = []
-                if user_prereq is not None:
-                    dep_jobs.append(user_prereq)
-                for dep in deps:
-                    if dep in depid.keys() and depid[dep] is not None:
-                        dep_jobs.append(str(depid[dep]))
-                if prev_job is not None:
-                    dep_jobs.append(prev_job)
+                dep_jobs = get_job_deps(dependency, depid, prev_job, user_prereq)
 
                 logger.debug("job {} depends on {}".format(job, dep_jobs))
+
                 result = self._submit_single_job(
                     case,
                     job,
@@ -1399,3 +1389,43 @@ def make_all_batch_files(self, case):
                     input_batch_script, job
                 )
             )
+
+
+def get_job_deps(dependency, depid, prev_job=None, user_prereq=None):
+    """
+    Gather list of job batch ids that a job depends on.
+
+    Parameters
+    ----------
+    dependency : str
+        Space separated list of dependent job names.
+    depid : dict
+        Lookup where keys are job names and values are the batch id.
+    prev_job : str
+        Batch id of the previous job.
+    user_prereq : str
+        User requested dependency.
+
+    Returns
+    -------
+    list
+        List of batch ids that the job depends on.
+    """
+    deps = []
+    dep_jobs = []
+
+    if user_prereq is not None:
+        dep_jobs.append(user_prereq)
+
+    if dependency is not None:
+        # Match all words, excluding "and" and "or"
+        deps = re.findall(r"\b(?!and\b|or\b)\w+(?:\.\w+)?\b", dependency)
+
+    for dep in deps:
+        if dep in depid and depid[dep] is not None:
+            dep_jobs.append(str(depid[dep]))
+
+    if prev_job is not None:
+        dep_jobs.append(prev_job)
+
+    return dep_jobs
diff --git a/CIME/case/case_st_archive.py b/CIME/case/case_st_archive.py
index 7ff0773c9b0..8238cf2f912 100644
--- a/CIME/case/case_st_archive.py
+++ b/CIME/case/case_st_archive.py
@@ -1186,7 +1186,9 @@ def test_env_archive(self, testdir="env_archive_test"):
     for comp_archive_spec in comp_archive_specs:
         comp_expected = archive.get(comp_archive_spec, "compname")
-        if comp_expected == "ww3":
+        # Rename ww3 component when case and archive names don't match,
+        # specific to CESM.
+        if comp_expected == "ww3" and "ww" in comps_in_case:
             comp_expected = "ww"
         comp_class = archive.get(comp_archive_spec, "compclass").upper()
         if comp_class in components:
diff --git a/CIME/tests/test_unit_xml_env_batch.py b/CIME/tests/test_unit_xml_env_batch.py
index 01b96d1ead2..d59c4b080c9 100755
--- a/CIME/tests/test_unit_xml_env_batch.py
+++ b/CIME/tests/test_unit_xml_env_batch.py
@@ -5,12 +5,189 @@
 import tempfile
 from unittest import mock
 
-from CIME.XML.env_batch import EnvBatch
+from CIME.utils import CIMEError
+from CIME.XML.env_batch import EnvBatch, get_job_deps
 
 # pylint: disable=unused-argument
 
 
 class TestXMLEnvBatch(unittest.TestCase):
+    @mock.patch("CIME.XML.env_batch.EnvBatch._submit_single_job")
+    def test_submit_jobs(self, _submit_single_job):
+        case = mock.MagicMock()
+
+        case.get_value.side_effect = [
+            False,
+        ]
+
+        env_batch = EnvBatch()
+
+        with self.assertRaises(CIMEError):
+            env_batch.submit_jobs(case)
+
+    @mock.patch("CIME.XML.env_batch.os.path.isfile")
+    @mock.patch("CIME.XML.env_batch.get_batch_script_for_job")
+    @mock.patch("CIME.XML.env_batch.EnvBatch._submit_single_job")
+    def test_submit_jobs_dependency(
+        self, _submit_single_job, get_batch_script_for_job, isfile
+    ):
+        case = mock.MagicMock()
+
+        case.get_env.return_value.get_jobs.return_value = [
+            "case.build",
+            "case.run",
+        ]
+
+        case.get_env.return_value.get_value.side_effect = [
+            None,
+            "",
+            None,
+            "case.build",
+        ]
+
+        case.get_value.side_effect = [
+            False,
+        ]
+
+        _submit_single_job.side_effect = ["0", "1"]
+
+        isfile.return_value = True
+
+        get_batch_script_for_job.side_effect = [".case.build", ".case.run"]
+
+        env_batch = EnvBatch()
+
+        depid = env_batch.submit_jobs(case)
+
+        _submit_single_job.assert_any_call(
+            case,
+            "case.build",
+            skip_pnl=False,
+            resubmit_immediate=False,
+            dep_jobs=[],
+            allow_fail=False,
+            no_batch=False,
+            mail_user=None,
+            mail_type=None,
+            batch_args=None,
+            dry_run=False,
+            workflow=True,
+        )
+        _submit_single_job.assert_any_call(
+            case,
+            "case.run",
+            skip_pnl=False,
+            resubmit_immediate=False,
+            dep_jobs=[
+                "0",
+            ],
+            allow_fail=False,
+            no_batch=False,
+            mail_user=None,
+            mail_type=None,
+            batch_args=None,
+            dry_run=False,
+            workflow=True,
+        )
+        assert depid == {"case.build": "0", "case.run": "1"}
+
+    @mock.patch("CIME.XML.env_batch.os.path.isfile")
+    @mock.patch("CIME.XML.env_batch.get_batch_script_for_job")
+    @mock.patch("CIME.XML.env_batch.EnvBatch._submit_single_job")
+    def test_submit_jobs_single(
+        self, _submit_single_job, get_batch_script_for_job, isfile
+    ):
+        case = mock.MagicMock()
+
+        case.get_env.return_value.get_jobs.return_value = [
+            "case.run",
+        ]
+
+        case.get_env.return_value.get_value.return_value = None
+
+        case.get_value.side_effect = [
+            False,
+        ]
+
+        _submit_single_job.return_value = "0"
+
+        isfile.return_value = True
+
+        get_batch_script_for_job.side_effect = [
+            ".case.run",
+        ]
+
+        env_batch = EnvBatch()
+
+        depid = env_batch.submit_jobs(case)
+
+        _submit_single_job.assert_any_call(
+            case,
+            "case.run",
+            skip_pnl=False,
+            resubmit_immediate=False,
+            dep_jobs=[],
+            allow_fail=False,
+            no_batch=False,
+            mail_user=None,
+            mail_type=None,
+            batch_args=None,
+            dry_run=False,
+            workflow=True,
+        )
+        assert depid == {"case.run": "0"}
+
+    def test_get_job_deps(self):
+        # no jobs
+        job_deps = get_job_deps("", {})
+
+        assert job_deps == []
+
+        # dependency doesn't exist
+        job_deps = get_job_deps("case.run", {})
+
+        assert job_deps == []
+
+        job_deps = get_job_deps("case.run", {"case.run": 0})
+
+        assert job_deps == [
+            "0",
+        ]
+
+        job_deps = get_job_deps(
+            "case.run case.post_run_io", {"case.run": 0, "case.post_run_io": 1}
+        )
+
+        assert job_deps == ["0", "1"]
+
+        # old syntax
+        job_deps = get_job_deps("case.run and case.post_run_io", {"case.run": 0})
+
+        assert job_deps == [
+            "0",
+        ]
+
+        # old syntax
+        job_deps = get_job_deps(
+            "(case.run and case.post_run_io) or case.test", {"case.run": 0}
+        )
+
+        assert job_deps == [
+            "0",
+        ]
+
+        job_deps = get_job_deps("", {}, user_prereq="2")
+
+        assert job_deps == [
+            "2",
+        ]
+
+        job_deps = get_job_deps("", {}, prev_job="1")
+
+        assert job_deps == [
+            "1",
+        ]
+
     def test_get_submit_args_job_queue(self):
         with tempfile.NamedTemporaryFile() as tfile:
             tfile.write(
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 7b4e2260cea..33f429a05b2 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -47,6 +47,8 @@ RUN mamba install --yes -c conda-forge \
 # gcc, gxx, gfortran provide symlinks for x86_64-conda-linux-gnu-*
 # ar and ranlib are not symlinked
 RUN mamba install --yes -c conda-forge \
+    lapack \
+    blas \
     libnetcdf=${LIBNETCDF_VERSION}=*openmpi* \
     netcdf-fortran=${NETCDF_FORTRAN_VERSION}=*openmpi* \
     esmf=${ESMF_VERSION}=*openmpi* \
diff --git a/docker/config_machines.xml b/docker/config_machines.xml
index 83db416a54d..46bac2f5618 100644
--- a/docker/config_machines.xml
+++ b/docker/config_machines.xml
@@ -37,6 +37,8 @@
     1
     1
+    /opt/conda
+    /opt/conda
diff --git a/docker/docker.cmake b/docker/docker.cmake
index cf367dcfd9e..ef3be66706f 100644
--- a/docker/docker.cmake
+++ b/docker/docker.cmake
@@ -1,11 +1,8 @@
 string(APPEND CXXFLAGS " -std=c++14")
 string(APPEND CXX_LIBS " -lstdc++")
-string(APPEND SLIBS " -L/opt/conda/lib -lnetcdf -lnetcdff")
 string(APPEND FFLAGS " -I/opt/conda/include")
+string(APPEND SLIBS " -L/opt/conda/lib")
 set(MPI_PATH "/opt/conda")
-set(NETCDF_C_PATH "/opt/conda")
-set(NETCDF_FORTRAN_PATH "/opt/conda")
-set(PNETCDF_PATH "/opt/conda")
 if (CMAKE_Fortran_COMPILER_VERSION VERSION_GREATER_EQUAL 10)
   string(APPEND FFLAGS " -fallow-argument-mismatch -fallow-invalid-boz ")
 endif()
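For illustration only (not part of the patch): the dependency parsing added in get_job_deps() hinges on one regular expression that keeps dotted job names while skipping the "and"/"or" keywords of the old boolean syntax, which is why the unit tests above expect the old "(case.run and case.post_run_io) or case.test" form to still resolve to known batch ids. A minimal standalone sketch of that behaviour follows; DEP_PATTERN is just a local name for the pattern copied from the function, and the expected output shown in the comments is what re.findall returns for these inputs.

import re

# Same pattern used by get_job_deps() in CIME/XML/env_batch.py. The negative
# lookahead skips the "and"/"or" keywords of the old dependency syntax, and
# the optional (?:\.\w+) keeps dotted job names such as "case.run" whole.
DEP_PATTERN = r"\b(?!and\b|or\b)\w+(?:\.\w+)?\b"

for dependency in (
    "case.run case.post_run_io",                     # new space separated syntax
    "(case.run and case.post_run_io) or case.test",  # old boolean syntax
):
    print(re.findall(DEP_PATTERN, dependency))

# Prints:
# ['case.run', 'case.post_run_io']
# ['case.run', 'case.post_run_io', 'case.test']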