diff --git a/.gitlab/config/SpackCIBridge.py b/.gitlab/config/SpackCIBridge.py new file mode 100755 index 0000000000..72c189e480 --- /dev/null +++ b/.gitlab/config/SpackCIBridge.py @@ -0,0 +1,692 @@ +#!/usr/bin/env python3 + +import argparse +import atexit +import base64 +from datetime import datetime, timedelta, timezone +import dateutil.parser +from github import Github +import json +import os +import re +import subprocess +import sys +import tempfile +import urllib.parse +import urllib.request + + +class SpackCIBridge(object): + + def __init__(self, gitlab_repo="", gitlab_host="", gitlab_project="", github_project="", + disable_status_post=True, sync_draft_prs=False, + main_branch=None, prereq_checks=[]): + self.gitlab_repo = gitlab_repo + self.github_project = github_project + github_token = os.environ.get('GITHUB_TOKEN') + self.github_repo = "https://{0}@github.com/{1}.git".format(github_token, self.github_project) + self.py_github = Github(github_token) + self.py_gh_repo = self.py_github.get_repo(self.github_project, lazy=True) + + self.merge_msg_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)") + self.unmergeable_shas = [] + + self.post_status = not disable_status_post + self.sync_draft_prs = sync_draft_prs + self.main_branch = main_branch + self.currently_running_sha = None + self.latest_tested_main_commit = None + + self.prereq_checks = prereq_checks + + dt = datetime.now(timezone.utc) + timedelta(minutes=-60) + self.time_threshold_brief = urllib.parse.quote_plus(dt.isoformat(timespec="seconds")) + + self.pipeline_api_template = gitlab_host + self.pipeline_api_template += "/api/v4/projects/" + self.pipeline_api_template += urllib.parse.quote_plus(gitlab_project) + self.pipeline_api_template += "/pipelines?ref={0}" + + self.commit_api_template = gitlab_host + self.commit_api_template += "/api/v4/projects/" + self.commit_api_template += urllib.parse.quote_plus(gitlab_project) + self.commit_api_template += "/repository/commits/{0}" + + self.cached_commits = {} + + @atexit.register + def cleanup(): + """Shutdown ssh-agent upon program termination.""" + if "SSH_AGENT_PID" in os.environ: + print(" Shutting down ssh-agent({0})".format(os.environ["SSH_AGENT_PID"])) + subprocess.run(["ssh-agent", "-k"], check=True) + + def setup_ssh(self, ssh_key_base64): + """Start the ssh agent.""" + print("Starting ssh-agent") + output = subprocess.run(["ssh-agent", "-s"], check=True, stdout=subprocess.PIPE).stdout + + # Search for PID in output. + pid_regexp = re.compile(r"SSH_AGENT_PID=([0-9]+)") + match = pid_regexp.search(output.decode("utf-8")) + if match is None: + print("WARNING: could not detect ssh-agent PID.", file=sys.stderr) + print("ssh-agent will not be killed upon program termination", file=sys.stderr) + else: + pid = match.group(1) + os.environ["SSH_AGENT_PID"] = pid + self.cleanup_ssh_agent = True + + # Search for socket in output. + socket_regexp = re.compile(r"SSH_AUTH_SOCK=([^;]+);") + match = socket_regexp.search(output.decode("utf-8")) + if match is None: + print("WARNING: could not detect ssh-agent socket.", file=sys.stderr) + print("Key will be added to caller's ssh-agent (if any)", file=sys.stderr) + else: + socket = match.group(1) + os.environ["SSH_AUTH_SOCK"] = socket + + # Add the key. 
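+        # The key arrives base64-encoded so it can be stored in a CI variable;
+        # decoding it and stripping carriage returns restores the original key file.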
+ ssh_key = base64.b64decode(ssh_key_base64) + ssh_key = ssh_key.replace(b"\r", b"") + with tempfile.NamedTemporaryFile() as fp: + fp.write(ssh_key) + fp.seek(0) + subprocess.run(["ssh-add", fp.name], check=True) + + def get_commit(self, commit): + """ Check our cache for a commit on GitHub. + If we don't have it yet, use the GitHub API to retrieve it.""" + if commit not in self.cached_commits: + self.cached_commits[commit] = self.py_gh_repo.get_commit(sha=commit) + return self.cached_commits[commit] + + def list_github_prs(self): + """ Return two dicts of data about open PRs on GitHub: + one for all open PRs, and one for open PRs that are not up-to-date on GitLab.""" + pr_dict = {} + pulls = self.py_gh_repo.get_pulls(state="open") + print("Rate limit after get_pulls(): {}".format(self.py_github.rate_limiting[0])) + for pull in pulls: + backlogged = False + push = True + if pull.draft and not self.sync_draft_prs: + print("Skipping draft PR {0} ({1})".format(pull.number, pull.head.ref)) + backlogged = "draft" + push = False + + pr_string = "pr{0}_{1}".format(pull.number, pull.head.ref) + + if push and pull.updated_at < datetime.now() + timedelta(minutes=-2880): + # Skip further analysis of this PR if it hasn't been updated in 48 hours. + # This helps us avoid wasting our rate limit on PRs with merge conflicts. + print("Skip pushing stale PR {0}".format(pr_string)) + backlogged = "stale" + push = False + + if push: + # Determine if this PR still needs to be pushed to GitLab. This happens in one of two cases: + # 1) we have never pushed it before + # 2) we have pushed it before, but the HEAD sha has changed since we pushed it last + log_args = ["git", "log", "--pretty=%s", "gitlab/{0}".format(pr_string)] + try: + merge_commit_msg = subprocess.run( + log_args, check=True, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL).stdout + match = self.merge_msg_regex.match(merge_commit_msg.decode("utf-8")) + if match and (match.group(1) == pull.head.sha or match.group(2) == pull.head.sha): + print("Skip pushing {0} because GitLab already has HEAD {1}".format(pr_string, pull.head.sha)) + push = False + except subprocess.CalledProcessError: + # This occurs when it's a new PR that hasn't been pushed to GitLab yet. + pass + + if push: + # Check the PRs-to-be-pushed to see if any of them should be considered "backlogged". + # We currently recognize three types of backlogged PRs: + # 1) Some required "prerequisite checks" have not yet completed successfully. + # 2) The PR is based on a version of the "main branch" that has not yet been tested + # 3) Draft PRs. Handled earlier in this function. + if not backlogged and self.prereq_checks: + checks_desc = "waiting for {} check to succeed" + checks_to_verify = self.prereq_checks.copy() + pr_check_runs = self.get_commit(pull.head.sha).get_check_runs() + for check in pr_check_runs: + if check.name in checks_to_verify: + checks_to_verify.remove(check.name) + if check.conclusion != "success": + backlogged = checks_desc.format(check.name) + push = False + break + if not backlogged and checks_to_verify: + backlogged = checks_desc.format(checks_to_verify[0]) + push = False + if backlogged: + print("Skip pushing {0} because of {1}".format(pr_string, backlogged)) + + if not backlogged: + if self.main_branch and pull.base.ref == self.main_branch: + # Check if we should defer pushing/testing this PR because it is based on "too new" of a commit + # of the main branch. 
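+                    # ("Too new" here means the PR's merge base with the main branch is
+                    # newer than the last main-branch commit GitLab has finished testing.)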
+ tmp_pr_branch = f"temporary_{pr_string}" + subprocess.run(["git", "fetch", "--unshallow", "github", + f"refs/pull/{pull.number}/head:{tmp_pr_branch}"], check=True) + # Get the merge base between this PR and the main branch. + try: + merge_base_sha = subprocess.run( + ["git", "merge-base", tmp_pr_branch, f"github/{self.main_branch}"], + check=True, stdout=subprocess.PIPE).stdout.strip() + except subprocess.CalledProcessError: + print(f"'git merge-base {tmp_pr_branch} github/{self.main_branch}' " + "returned non-zero. Skipping") + self.unmergeable_shas.append(pull.head.sha) + continue + + repo_head_sha = subprocess.run( + ["git", "rev-parse", tmp_pr_branch], + check=True, stdout=subprocess.PIPE).stdout.decode("utf-8").strip() + + if pull.head.sha != repo_head_sha: + # If gh repo and api don't agree on what the head sha is, don't + # push. Instead log an error message and backlog the PR. + a_sha, r_sha = pull.head.sha[:7], repo_head_sha[:7] + print(f"Skip pushing {pr_string} because api says HEAD is {a_sha}, " + f"while repo says HEAD is {r_sha}") + backlogged = f"GitHub HEAD shas out of sync (repo={r_sha}, API={a_sha})" + push = False + # Check if our PR's merge base is an ancestor of the latest tested main branch commit. + elif subprocess.run( + ["git", "merge-base", "--is-ancestor", merge_base_sha, self.latest_tested_main_commit] + ).returncode == 0: + print(f"{tmp_pr_branch}'s merge base IS an ancestor of latest_tested_main " + f"{merge_base_sha} vs. {self.latest_tested_main_commit}") + try: + subprocess.run(["git", "checkout", self.latest_tested_main_commit], check=True) + subprocess.run(["git", "checkout", "-b", pr_string], check=True) + commit_msg = f"Merge {pull.head.sha} into {self.latest_tested_main_commit}" + subprocess.run( + ["git", "merge", "--no-ff", "-m", commit_msg, tmp_pr_branch], + check=True) + print(f"Merge succeeded, ready to push {pr_string} to GitLab for CI pipeline testing") + except subprocess.CalledProcessError: + print(f"Failed to merge PR {pull.number} ({pull.head.ref}) with latest tested " + f"{self.main_branch} ({self.latest_tested_main_commit}). Skipping") + self.unmergeable_shas.append(pull.head.sha) + subprocess.run(["git", "merge", "--abort"]) + backlogged = "merge conflicts with {}".format(self.main_branch) + push = False + continue + else: + print(f"Skip pushing {pr_string} because its merge base is NOT an ancestor of " + f"latest_tested_main {merge_base_sha} vs. {self.latest_tested_main_commit}") + backlogged = "base" + push = False + else: + # If the --main-branch CLI argument wasn't passed, or if this PR doesn't target that branch, + # then we will push the merge commit that was automatically created by GitHub to GitLab + # where it will kick off a CI pipeline. + try: + subprocess.run(["git", "fetch", "--unshallow", "github", + f"{pull.merge_commit_sha}:{pr_string}"], check=True) + except subprocess.CalledProcessError: + print("Failed to locally checkout PR {0} ({1}). 
Skipping" + .format(pull.number, pull.merge_commit_sha)) + backlogged = "GitLab failed to checkout this branch" + push = False + continue + + pr_dict[pr_string] = { + 'base_sha': pull.base.sha, + 'head_sha': pull.head.sha, + 'push': push, + 'backlogged': backlogged, + } + + def listify_dict(d): + pr_strings = sorted(d.keys()) + base_shas = [d[s]['base_sha'] for s in pr_strings] + head_shas = [d[s]['head_sha'] for s in pr_strings] + b_logged = [d[s]['backlogged'] for s in pr_strings] + return { + "pr_strings": pr_strings, + "base_shas": base_shas, + "head_shas": head_shas, + "backlogged": b_logged, + } + all_open_prs = listify_dict(pr_dict) + filtered_pr_dict = {k: v for (k, v) in pr_dict.items() if v['push']} + filtered_open_prs = listify_dict(filtered_pr_dict) + print("All Open PRs:") + for pr_string in all_open_prs['pr_strings']: + print(" {0}".format(pr_string)) + print("Filtered Open PRs:") + for pr_string in filtered_open_prs['pr_strings']: + print(" {0}".format(pr_string)) + print("Rate limit at the end of list_github_prs(): {}".format(self.py_github.rate_limiting[0])) + return [all_open_prs, filtered_open_prs] + + def list_github_protected_branches(self): + """ Return a list of protected branch names from GitHub.""" + branches = self.py_gh_repo.get_branches() + print("Rate limit after get_branches(): {}".format(self.py_github.rate_limiting[0])) + protected_branches = [br.name for br in branches if br.protected] + protected_branches = sorted(protected_branches) + if self.currently_running_sha: + print("Skip pushing {0} because it already has a pipeline running ({1})" + .format(self.main_branch, self.currently_running_sha)) + protected_branches.remove(self.main_branch) + print("Protected branches:") + for protected_branch in protected_branches: + print(" {0}".format(protected_branch)) + return protected_branches + + def list_github_tags(self): + """ Return a list of tag names from GitHub.""" + tag_list = self.py_gh_repo.get_tags() + print("Rate limit after get_tags(): {}".format(self.py_github.rate_limiting[0])) + tags = sorted([tag.name for tag in tag_list]) + print("Tags:") + for tag in tags: + print(" {0}".format(tag)) + return tags + + def setup_git_repo(self): + """Initialize a bare git repository with two remotes: + one for GitHub and one for GitLab. + If main_branch was specified, we also fetch that branch from GitHub. + """ + subprocess.run(["git", "init"], check=True) + subprocess.run(["git", "config", "user.email", "noreply@spack.io"], check=True) + subprocess.run(["git", "config", "user.name", "spackbot"], check=True) + subprocess.run(["git", "config", "advice.detachedHead", "false"], check=True) + subprocess.run(["git", "remote", "add", "github", self.github_repo], check=True) + subprocess.run(["git", "remote", "add", "gitlab", self.gitlab_repo], check=True) + + # Shallow fetch from GitLab. + self.gitlab_shallow_fetch() + + if self.main_branch: + subprocess.run(["git", "fetch", "--unshallow", "github", self.main_branch], check=True) + + def get_gitlab_pr_branches(self): + """Query GitLab for branches that have already been copied over from GitHub PRs. + Return the string output of `git branch --remotes --list gitlab/pr*`. 
+ """ + branch_args = ["git", "branch", "--remotes", "--list", "gitlab/pr*"] + self.gitlab_pr_output = \ + subprocess.run(branch_args, check=True, stdout=subprocess.PIPE).stdout + + def gitlab_shallow_fetch(self): + """Perform a shallow fetch from GitLab""" + fetch_args = ["git", "fetch", "-q", "--depth=1", "gitlab"] + subprocess.run(fetch_args, check=True, stdout=subprocess.PIPE).stdout + + def get_open_refspecs(self, open_prs): + """Return a list of refspecs to push given a list of open PRs.""" + print("Building initial lists of refspecs to fetch and push") + pr_strings = open_prs["pr_strings"] + base_shas = open_prs["base_shas"] + backlogged = open_prs["backlogged"] + open_refspecs = [] + for open_pr, base_sha, backlog in zip(pr_strings, base_shas, backlogged): + open_refspecs.append("{0}:{0}".format(open_pr)) + print(" pushing {0} (based on {1})".format(open_pr, base_sha)) + return open_refspecs + + def update_refspecs_for_protected_branches(self, protected_branches, open_refspecs, fetch_refspecs): + """Update our refspecs lists for protected branches from GitHub.""" + for protected_branch in protected_branches: + fetch_refspecs.append("+refs/heads/{0}:refs/remotes/{0}".format(protected_branch)) + open_refspecs.append("refs/heads/{0}:refs/heads/{0}".format(protected_branch)) + return open_refspecs, fetch_refspecs + + def update_refspecs_for_tags(self, tags, open_refspecs, fetch_refspecs): + """Update our refspecs lists for tags from GitHub.""" + for tag in tags: + fetch_refspecs.append("+refs/tags/{0}:refs/tags/{0}".format(tag)) + open_refspecs.append("refs/tags/{0}:refs/tags/{0}".format(tag)) + return open_refspecs, fetch_refspecs + + def fetch_github_branches(self, fetch_refspecs): + """Perform `git fetch` for a given list of refspecs.""" + print("Fetching GitHub refs for open PRs") + fetch_args = ["git", "fetch", "-q", "--unshallow", "github"] + fetch_refspecs + subprocess.run(fetch_args, check=True) + + def build_local_branches(self, protected_branches): + """Create local branches for a list of protected branches.""" + print("Building local branches for protected branches") + for branch in protected_branches: + local_branch_name = "{0}".format(branch) + remote_branch_name = "refs/remotes/{0}".format(branch) + subprocess.run(["git", "branch", "-q", local_branch_name, remote_branch_name], check=True) + + def make_status_for_pipeline(self, pipeline): + """Generate POST data to create a GitHub status from a GitLab pipeline + API response + """ + post_data = {} + if "status" not in pipeline: + return post_data + + if pipeline["status"] == "created": + post_data["state"] = "pending" + post_data["description"] = "Pipeline has been created" + + elif pipeline["status"] == "waiting_for_resource": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is waiting for resources" + + elif pipeline["status"] == "preparing": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is preparing" + + elif pipeline["status"] == "pending": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is pending" + + elif pipeline["status"] == "running": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is running" + + elif pipeline["status"] == "manual": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is running manually" + + elif pipeline["status"] == "scheduled": + post_data["state"] = "pending" + post_data["description"] = "Pipeline is scheduled" + + elif pipeline["status"] == "failed": + post_data["state"] = 
"error" + post_data["description"] = "Pipeline failed" + + elif pipeline["status"] == "canceled": + # Do not post canceled pipeline status to GitHub, it's confusing to our users. + # This usually happens when a PR gets force-pushed. The next time the sync script runs + # it will post a status for the newly force-pushed commit. + return {} + + elif pipeline["status"] == "skipped": + post_data["state"] = "failure" + post_data["description"] = "Pipeline was skipped" + + elif pipeline["status"] == "success": + post_data["state"] = "success" + post_data["description"] = "Pipeline succeeded" + + post_data["target_url"] = pipeline["web_url"] + return post_data + + def dedupe_pipelines(self, api_response): + """Prune pipelines API response to only include the most recent result for each SHA""" + pipelines = {} + for response in api_response: + sha = response['sha'] + if sha not in pipelines: + pipelines[sha] = response + else: + existing_datetime = dateutil.parser.parse(pipelines[sha]['updated_at']) + current_datetime = dateutil.parser.parse(response['updated_at']) + if current_datetime > existing_datetime: + pipelines[sha] = response + return pipelines + + def find_pr_sha(self, tested_sha): + api_url = self.commit_api_template.format(tested_sha) + + try: + request = urllib.request.Request(api_url) + if "GITLAB_TOKEN" in os.environ: + request.add_header("Authorization", "Bearer %s" % os.environ["GITLAB_TOKEN"]) + response = urllib.request.urlopen(request) + except OSError: + print('Failed to fetch commit for tested sha {0}'.format(tested_sha)) + return None + + response_data = response.read() + + try: + tested_commit_info = json.loads(response_data) + except json.decoder.JSONDecodeError: + print('Failed to parse response as json ({0})'.format(response_data)) + return None + + if 'title' not in tested_commit_info: + print('Returned commit object missing "Title" field') + return None + + merge_commit_msg = tested_commit_info['title'] + m = self.merge_msg_regex.match(merge_commit_msg) + + if m is None: + print('Failed to find pr_sha in merge commit message') + return None + + return m.group(1) + + def get_pipelines_for_branch(self, branch, time_threshold=None): + # Use gitlab's API to get pipeline results for the corresponding ref. + api_url = self.pipeline_api_template.format( + urllib.parse.quote_plus(branch) + ) + + # Optionally constrain the query with the provided time_threshold + if time_threshold: + api_url = "{0}&updated_after={1}".format(api_url, time_threshold) + + try: + request = urllib.request.Request(api_url) + if "GITLAB_TOKEN" in os.environ: + request.add_header("Authorization", "Bearer %s" % os.environ["GITLAB_TOKEN"]) + response = urllib.request.urlopen(request) + except OSError as inst: + print("GitLab API request error accessing {0}".format(api_url)) + print(inst) + return None + try: + pipelines = json.loads(response.read()) + except json.decoder.JSONDecodeError as inst: + print("Error parsing response to {0}".format(api_url)) + print(inst) + return None + + return self.dedupe_pipelines(pipelines) + + def post_pipeline_status(self, open_prs, protected_branches): + print("Rate limit at the beginning of post_pipeline_status(): {}".format(self.py_github.rate_limiting[0])) + pipeline_branches = [] + backlog_branches = [] + # Split up the open_prs branches into two piles: branches we force-pushed to gitlab + # and branches we deferred pushing. 
+ for pr_branch, base_sha, head_sha, backlog in zip(open_prs["pr_strings"], + open_prs["base_shas"], + open_prs["head_shas"], + open_prs["backlogged"]): + if not backlog: + pipeline_branches.append(pr_branch) + else: + backlog_branches.append((pr_branch, head_sha, backlog)) + + pipeline_branches.extend(protected_branches) + + print('Querying pipelines to post status for:') + for branch in pipeline_branches: + # Post status to GitHub for each pipeline found. + pipelines = self.get_pipelines_for_branch(branch, self.time_threshold_brief) + if not pipelines: + continue + for sha, pipeline in pipelines.items(): + post_data = self.make_status_for_pipeline(pipeline) + if not post_data: + continue + # TODO: associate shas with protected branches, so we do not have to + # hit an endpoint here, but just use the sha we already know just like + # we do below for backlogged PR statuses. + pr_sha = self.find_pr_sha(sha) + if not pr_sha: + print('Could not find github PR sha for tested commit: {0}'.format(sha)) + print('Using tested commit to post status') + pr_sha = sha + self.create_status_for_commit(pr_sha, + branch, + post_data["state"], + post_data["target_url"], + post_data["description"]) + + # Post a status of pending/backlogged for branches we deferred pushing + print("Posting backlogged status to the following:") + base_backlog_desc = \ + "This branch's merge-base with {} is newer than the latest commit tested by GitLab".format(self.main_branch) + for branch, head_sha, reason in backlog_branches: + if reason == "stale": + print("Skip posting status for {} because it has not been updated recently".format(branch)) + continue + elif reason == "base": + desc = base_backlog_desc + url = "https://github.com/spack/spack-infrastructure/blob/main/docs/deferred_pipelines.md" + elif reason == "draft": + desc = "GitLab CI is disabled for draft PRs" + url = "" + else: + desc = reason + url = "" + self.create_status_for_commit(head_sha, branch, "pending", url, desc) + + # Post errors to any PRs that we couldn't merge to latest_tested_main_commit. 
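+        # (No branch name or pipeline URL exists for these, so those fields are left empty.)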
+ print('Posting unmergeable status to the following:') + for sha in self.unmergeable_shas: + print(' {0}'.format(sha)) + self.create_status_for_commit(sha, "", "error", "", f"PR could not be merged with {self.main_branch}") + print("Rate limit at the end of post_pipeline_status(): {}".format(self.py_github.rate_limiting[0])) + + def create_status_for_commit(self, sha, branch, state, target_url, description): + context = "OLCF Ascent (Summit)" + commit = self.get_commit(sha) + existing_statuses = commit.get_combined_status() + for status in existing_statuses.statuses: + if (status.context == context and + status.state == state and + status.description == description and + status.target_url == target_url): + print("Not posting duplicate status to {} / {}".format(branch, sha)) + return + try: + status_response = self.get_commit(sha).create_status( + state=state, + target_url=target_url, + description=description, + context=context + ) + if status_response.state != state: + print("Expected CommitStatus state {0}, got {1}".format( + state, status_response.state)) + except Exception as e_inst: + print('Caught exception posting status for {0}/{1}'.format(branch, sha)) + print(e_inst) + print(" {0} -> {1}".format(branch, sha)) + + def sync(self): + """Synchronize pull requests from GitHub as branches on GitLab.""" + + print("Initial rate limit: {}".format(self.py_github.rate_limiting[0])) + reset_time = datetime.utcfromtimestamp(self.py_github.rate_limiting_resettime).strftime('%Y-%m-%d %H:%M:%S') + print("Rate limit will refresh at: {} UTC".format(reset_time)) + + # Setup SSH command for communicating with GitLab. + os.environ["GIT_SSH_COMMAND"] = "ssh -F /dev/null -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" + + # Work inside a temporary directory that will be deleted when this script terminates. + with tempfile.TemporaryDirectory() as tmpdirname: + os.chdir(tmpdirname) + + # Setup the local repo with two remotes. + self.setup_git_repo() + + if self.main_branch: + # Find the currently running main branch pipeline, if any, and get the sha. + # Also get the latest commit on the main branch that has a completed pipeline. + main_branch_pipelines = self.get_pipelines_for_branch(self.main_branch) + for sha, pipeline in main_branch_pipelines.items(): + if self.latest_tested_main_commit is None and \ + (pipeline['status'] == "success" or pipeline['status'] == "failed"): + self.latest_tested_main_commit = sha + + if self.currently_running_sha is None and pipeline['status'] == "running": + self.currently_running_sha = sha + + if self.latest_tested_main_commit and self.currently_running_sha: + break + + print("Latest completed {0} pipeline: {1}".format(self.main_branch, self.latest_tested_main_commit)) + print("Currently running {0} pipeline: {1}".format(self.main_branch, self.currently_running_sha)) + + # Retrieve open PRs from GitHub. + all_open_prs, open_prs = self.list_github_prs() + + # Get protected branches on GitHub. + protected_branches = self.list_github_protected_branches() + + # Get tags on GitHub. + tags = self.list_github_tags() + + # Get refspecs for open PRs and protected branches. + open_refspecs = self.get_open_refspecs(open_prs) + fetch_refspecs = [] + self.update_refspecs_for_protected_branches(protected_branches, open_refspecs, fetch_refspecs) + self.update_refspecs_for_tags(tags, open_refspecs, fetch_refspecs) + + # Sync open GitHub PRs and protected branches to GitLab. 
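+            # The push below is forced (-f) so a rebased PR branch replaces its
+            # stale copy on GitLab instead of being rejected as a non-fast-forward.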
+            self.fetch_github_branches(fetch_refspecs)
+            self.build_local_branches(protected_branches)
+            if open_refspecs:
+                print("Syncing to GitLab")
+                push_args = ["git", "push", "--porcelain", "-f", "gitlab"] + open_refspecs
+                subprocess.run(push_args, check=True)
+
+            # Post pipeline status to GitHub for each open PR, if enabled
+            if self.post_status:
+                print('Posting pipeline status for open PRs and protected branches')
+                self.post_pipeline_status(all_open_prs, protected_branches)
+
+
+if __name__ == "__main__":
+    # Parse command-line arguments.
+    parser = argparse.ArgumentParser(description="Sync GitHub PRs to GitLab")
+    parser.add_argument("github_project", help="GitHub project (org/repo or user/repo)")
+    parser.add_argument("gitlab_repo", help="Full clone URL for GitLab")
+    parser.add_argument("gitlab_host", help="GitLab web host")
+    parser.add_argument("gitlab_project", help="GitLab project (org/repo or user/repo)")
+    parser.add_argument("--disable-status-post", action="store_true", default=False,
+                        help="Do not post pipeline status to each GitHub PR")
+    parser.add_argument("--sync-draft-prs", action="store_true", default=False,
+                        help="Copy draft PRs from GitHub to GitLab")
+    parser.add_argument("--pr-mirror-bucket", default=None,
+                        help="Delete mirrors for closed PRs from the specified S3 bucket")
+    parser.add_argument("--main-branch", default=None,
+                        help="""If provided, we check if there is a currently running
+pipeline for this branch. If so, we defer syncing any subsequent commits in an effort
+to not interrupt this pipeline. We also defer pushing any PR branches that are based
+on a commit of the main branch that is newer than the latest commit tested by GitLab.""")
+    parser.add_argument("--prereq-check", action="append", default=None,
+                        help="Only push branches that have already passed this GitHub check"
+                             " (may be repeated)")
+
+    args = parser.parse_args()
+
+    ssh_key_base64 = os.getenv("GITLAB_SSH_KEY_BASE64")
+    if ssh_key_base64 is None:
+        raise Exception("GITLAB_SSH_KEY_BASE64 environment variable is not set")
+
+    if "GITHUB_TOKEN" not in os.environ:
+        raise Exception("GITHUB_TOKEN environment variable is not set")
+
+    bridge = SpackCIBridge(gitlab_repo=args.gitlab_repo,
+                           gitlab_host=args.gitlab_host,
+                           gitlab_project=args.gitlab_project,
+                           github_project=args.github_project,
+                           disable_status_post=args.disable_status_post,
+                           sync_draft_prs=args.sync_draft_prs,
+                           main_branch=args.main_branch,
+                           prereq_checks=args.prereq_check)
+    bridge.setup_ssh(ssh_key_base64)
+    bridge.sync()
diff --git a/.gitlab/config/ccache.cmake b/.gitlab/config/ccache.cmake
new file mode 100644
index 0000000000..5e5f90c66b
--- /dev/null
+++ b/.gitlab/config/ccache.cmake
@@ -0,0 +1,61 @@
+##============================================================================
+## Copyright (c) Kitware, Inc.
+## All rights reserved.
+## See LICENSE.txt for details.
+##
+## This software is distributed WITHOUT ANY WARRANTY; without even
+## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+## PURPOSE. See the above copyright notice for more information.
+##============================================================================
+
+cmake_minimum_required(VERSION 3.0 FATAL_ERROR)
+
+set(version 4.6.1)
+set(arch x86_64)
+
+if(CMAKE_HOST_SYSTEM_NAME STREQUAL "Linux")
+  set(sha256sum da1e1781bc1c4b019216fa16391af3e1daaee7e7f49a8ec9b0cdc8a1d05c50e2)
+  set(base_url https://github.com/ccache/ccache/releases/download)
+  set(platform linux)
+  set(extension tar.xz)
+elseif(CMAKE_HOST_SYSTEM_NAME STREQUAL "Darwin")
+  set(sha256sum 3e36ba8c80fbf7f2b95fe0227b9dd1ca6143d721aab052caf0d5729769138059)
+  set(full_url https://gitlab.kitware.com/utils/ci-utilities/-/package_files/534/download)
+  set(filename ccache)
+  set(extension tar.gz)
+elseif(CMAKE_HOST_SYSTEM_NAME STREQUAL "Windows")
+  set(sha256sum a6c6311973aa3d2aae22424895f2f968e5d661be003b25f1bd854a5c0cd57563)
+  set(base_url https://github.com/ccache/ccache/releases/download)
+  set(platform windows)
+  set(extension zip)
+else()
+  message(FATAL_ERROR "Unrecognized platform ${CMAKE_HOST_SYSTEM_NAME}")
+endif()
+
+if(NOT DEFINED filename)
+  set(filename "ccache-${version}-${platform}-${arch}")
+endif()
+
+set(tarball "${filename}.${extension}")
+
+if(NOT DEFINED full_url)
+  set(full_url "${base_url}/v${version}/${tarball}")
+endif()
+
+file(DOWNLOAD
+  "${full_url}" $ENV{CCACHE_INSTALL_DIR}/${tarball}
+  EXPECTED_HASH SHA256=${sha256sum}
+  SHOW_PROGRESS
+  )
+
+execute_process(
+  COMMAND ${CMAKE_COMMAND} -E tar xf ${tarball}
+  WORKING_DIRECTORY $ENV{CCACHE_INSTALL_DIR}
+  RESULT_VARIABLE extract_results
+  )
+
+if(extract_results)
+  message(FATAL_ERROR "Extracting `${tarball}` failed: ${extract_results}.")
+endif()
+
+file(RENAME $ENV{CCACHE_INSTALL_DIR}/${filename} $ENV{CCACHE_INSTALL_DIR}/ccache)
diff --git a/.gitlab/config/dynamic_pipeline.yml.in b/.gitlab/config/dynamic_pipeline.yml.in
new file mode 100644
index 0000000000..e5a9000594
--- /dev/null
+++ b/.gitlab/config/dynamic_pipeline.yml.in
@@ -0,0 +1,8 @@
+child_pipeline_{branch}:
+  variables:
+    CUSTOM_CI_BUILDS_DIR: "/lustre/orion/csc303/scratch/vbolea/ci/adios2/runtime"
+  trigger:
+    include:
+      - project: 'ci/csc303_crusher/dev/adios2'
+        ref: '{branch}'
+        file: '.gitlab/gitlab-ci-crusher.yml'
diff --git a/.gitlab/config/generate_pipelines.py b/.gitlab/config/generate_pipelines.py
new file mode 100755
index 0000000000..4ae975aaf0
--- /dev/null
+++ b/.gitlab/config/generate_pipelines.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python3

+# Distributed under the OSI-approved Apache License, Version 2.0. See
+# accompanying file Copyright.txt for details.
+#
+# generate_pipelines.py
+#
+# Created: May 19, 2023
+# Author: Vicente Adolfo Bolea Sanchez
+
+from datetime import datetime
+import argparse
+import requests
+import time
+# Remove annoying insecure warning
+import urllib3
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+
+def is_date_after(date, days):
+    deadline_sec = int(time.time()) - (days * 86400)
+    utc_dt = datetime.strptime(date, '%Y-%m-%dT%H:%M:%SZ')
+    timestamp_sec = (utc_dt - datetime(1970, 1, 1)).total_seconds()
+    return timestamp_sec > deadline_sec
+
+
+def request_dict(url):
+    r = requests.get(url, verify=False)
+    return r.json()
+
+
+parser = argparse.ArgumentParser(
+    prog='generate_pipelines.py',
+    description='Generate dynamic pipelines for GitLab')
+parser.add_argument(
+    '-u', '--url', required=True,
    help='Base URL for GitLab remote. Ex: https://code.olcf.ornl.gov/')
+parser.add_argument(
+    '-p', '--project_id', required=True,
+    help='Internal project ID of the GitLab project.')
+parser.add_argument(
+    '-d', '--days', type=int, default=1,
+    help='How many days back to search for commits')
+parser.add_argument(
+    '-m', '--max', type=int, default=3,
+    help='Maximum number of pipelines to generate')
+parser.add_argument(
+    '-f', '--template_file', required=True,
+    help='Pipeline template file; `{branch}` will be substituted with the branch name')
+args = parser.parse_args()
+
+with open(args.template_file, "r") as fd:
+    template_str = fd.read()
+    url = args.url + "/api/v4/projects/" + str(args.project_id)
+    branches = request_dict(url + "/repository/branches")
+    num_pipeline = 0
+    for branch in branches:
+        # Convert to ISO 8601 date format.
+        date_stamp = branch['commit']['committed_date'].split('.')[0] + "Z"
+        if num_pipeline < args.max and is_date_after(date_stamp, args.days):
+            commit_sha = branch['commit']['id']
+            commit = request_dict(url + "/repository/commits/" + commit_sha)
+            if commit['status'] is None:
+                num_pipeline += 1
+                print(template_str.format(branch=branch['name']))
diff --git a/.gitlab/kokkos.sh b/.gitlab/config/kokkos.sh
similarity index 90%
rename from .gitlab/kokkos.sh
rename to .gitlab/config/kokkos.sh
index 698fe84ac7..2504b90637 100755
--- a/.gitlab/kokkos.sh
+++ b/.gitlab/config/kokkos.sh
@@ -23,7 +23,6 @@ cmake -S "$WORKDIR/kokkos-$VERSION" -B "$WORKDIR/kokkos_build" \
   "-DCMAKE_CXX_STANDARD:STRING=17" \
   "-DCMAKE_CXX_EXTENSIONS:BOOL=OFF" \
   "-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=ON" \
-  "-DCMAKE_CXX_COMPILER:STRING=$WORKDIR/kokkos-$VERSION/bin/nvcc_wrapper" \
   $*
 
 cmake --build "$WORKDIR/kokkos_build"
diff --git a/.gitlab/gitlab-ci-ascent.yml b/.gitlab/gitlab-ci-ascent.yml
index 1d1e5c0a95..02e50f4584 100644
--- a/.gitlab/gitlab-ci-ascent.yml
+++ b/.gitlab/gitlab-ci-ascent.yml
@@ -21,6 +21,7 @@
     CUDAHOSTCXX: "g++"
     CUSTOM_CI_BUILDS_DIR: "/gpfs/wolf/csc303/scratch/vbolea/ci/adios2"
     GITLAB_SITE: "OLCF Ascent"
+    CI_BIN_DIR: "$CI_PROJECT_DIR/build"
     SCHEDULER_PARAMETERS: -P CSC303 -W 1:00 -nnodes 1 -alloc_flags gpudefault
   before_script:
     - *setup_env_ecpci
@@ -68,18 +69,22 @@ ascent-kokkos-cuda:
       libffi
       hdf5
      cmake
+    KOKKOS_VER: 3.7.01
+    Kokkos_DIR: "$CI_PROJECT_DIR/deps/kokkos_install"
+    # CMake will not install an RPATH inside the source dir
+    LD_LIBRARY_PATH: "$Kokkos_DIR/lib64/:$LD_LIBRARY_PATH"
     KOKKOS_OPTS: >-
       -DKokkos_ARCH_POWER9=ON
       -DKokkos_ARCH_VOLTA70=ON
       -DKokkos_ENABLE_CUDA=ON
       -DKokkos_ENABLE_CUDA_LAMBDA=ON
-      -DCMAKE_INSTALL_PREFIX:PATH=$CI_BUILDS_DIR/kokkos_install
-
-    Kokkos_DIR: $CI_BUILDS_DIR/kokkos_install
+      -DCMAKE_INSTALL_PREFIX:PATH=$Kokkos_DIR
+      -DCMAKE_CXX_COMPILER:STRING=$CI_PROJECT_DIR/deps/kokkos-$KOKKOS_VER/bin/nvcc_wrapper
   before_script:
     - *setup_env_ecpci
+    - mkdir -p "$CI_PROJECT_DIR/deps"
     - ccache -z
-    - .gitlab/kokkos.sh "$CI_BUILDS_DIR" "3.7.01" $KOKKOS_OPTS
+    - .gitlab/config/kokkos.sh "$CI_PROJECT_DIR/deps" "$KOKKOS_VER" $KOKKOS_OPTS
   extends:
     - .ascent-common
@@ -127,4 +132,4 @@ sync-github-prs:
     CUSTOM_CI_BUILDS_DIR: "/gpfs/wolf/csc303/scratch/vbolea/ci/adios2"
   script:
     - export PATH="/gpfs/wolf/csc303/scratch/vbolea/ci/utils:$PATH"
-    - SpackCIBridge.py ornladios/ADIOS2 git@code.ornl.gov:ecpcitest/adios2.git https://code.ornl.gov/ ecpcitest/adios2 --prereq-check=format --prereq-check=git_checks
+    - .gitlab/config/SpackCIBridge.py ornladios/ADIOS2 git@code.ornl.gov:ecpcitest/adios2.git https://code.ornl.gov/ ecpcitest/adios2 --prereq-check=format --prereq-check=git_checks
diff --git a/.gitlab/gitlab-ci-crusher.yml b/.gitlab/gitlab-ci-crusher.yml
new file mode 100644
index 0000000000..5d1c2b6109
--- /dev/null
+++ b/.gitlab/gitlab-ci-crusher.yml
@@ -0,0 +1,230 @@
+# Ad-hoc build that runs on ECP hardware, specifically on OLCF Crusher.
+
+stages:
+  - pre
+  - setup
+  - build
+  - post
+
+.setup_env_ecpci: &setup_env_ecpci |
+  module purge
+  module load ${JOB_MODULES}
+  module list
+  export PATH="${CCACHE_INSTALL_DIR}/ccache:$PATH"
+
+.install_ccache: &install_ccache |
+  mkdir -p "$CCACHE_INSTALL_DIR"
+  cmake --version
+  cmake -VV -P .gitlab/config/ccache.cmake
+  ccache -z
+  ccache -s
+
+.crusher-common:
+  except:
+    - schedules
+    - pushes
+  interruptible: true
+  variables:
+    CCACHE_BASEDIR: "/lustre/orion/csc303/scratch/"
+    CCACHE_DIR: "/lustre/orion/csc303/scratch/vbolea/ci/adios2/ccache"
+    CUSTOM_CI_BUILDS_DIR: "/lustre/orion/csc303/scratch/vbolea/ci/adios2/runtime"
+
+    # -isystem= is not affected by CCACHE_BASEDIR, thus we must ignore it
+    CCACHE_IGNOREOPTIONS: "-isystem=*"
+    CCACHE_NOHASHDIR: "true"
+    CCACHE_INSTALL_DIR: "$CI_PROJECT_DIR/deps/ccache_install"
+
+    CMAKE_BUILD_TYPE: "RelWithDebInfo"
+    CMAKE_GENERATOR: "Ninja"
+    CMAKE_PREFIX_PATH: "$CI_PROJECT_DIR/deps/kokkos_install"
+
+    # We do not want to use the user's ~/.gitconfig
+    GIT_CONFIG_GLOBAL: "true"
+    GITLAB_SITE: "OLCF Crusher"
+    CI_BIN_DIR: "$CI_PROJECT_DIR/build"
+
+.setup-common:
+  stage: setup
+  tags: [ shell ]
+  before_script:
+    - *setup_env_ecpci
+    - *install_ccache
+  script:
+    - bash scripts/ci/gitlab-ci/run.sh update
+  artifacts:
+    expire_in: 24 hours
+    when: always
+    paths:
+      - deps/*install/
+      - build/
+
+.build-common:
+  stage: build
+  tags: [ slurm ]
+  variables:
+    SCHEDULER_PARAMETERS: "-ACSC303_crusher -t30 --nice=0 -c32 --gpus=4 -N 1"
+  before_script:
+    - *setup_env_ecpci
+  script:
+    - bash scripts/ci/gitlab-ci/run.sh configure
+    - bash scripts/ci/gitlab-ci/run.sh build
+    - bash scripts/ci/gitlab-ci/run.sh test
+  after_script:
+    - *setup_env_ecpci
+    - bash scripts/ci/gitlab-ci/run.sh submit
+    - ccache -s
+
+.kokkos-hip-common:
+  variables:
+    Kokkos_DIR: "$CI_PROJECT_DIR/deps/kokkos_install"
+    # CMake will not install an RPATH inside the source dir
+    LD_LIBRARY_PATH: "$Kokkos_DIR/lib64/:$LD_LIBRARY_PATH"
+    # Order matters
+    JOB_MODULES: >-
+      craype-accel-amd-gfx90a
+      gcc/12
+      cmake
+      rocm/5.4.3
+      git
+      ninja
+      libffi
+      hdf5
+      zstd
+
+setup:crusher-kokkos-hip:
+  variables:
+    KOKKOS_VER: 3.7.01
+    KOKKOS_OPTS: >-
+      -DCMAKE_INSTALL_PREFIX:PATH=$Kokkos_DIR
+      -DCMAKE_CXX_COMPILER:FILEPATH=/opt/rocm-5.4.3/hip/bin/hipcc
+      -DKokkos_ARCH_VEGA90A:BOOL=ON
+      -DKokkos_ENABLE_HIP:BOOL=ON
+      -DKokkos_ENABLE_HIP_RELOCATABLE_DEVICE_CODE:BOOL=OFF
+      -DKokkos_ENABLE_SERIAL:BOOL=ON
+  extends:
+    - .crusher-common
+    - .setup-common
+    - .kokkos-hip-common
+  before_script:
+    - *setup_env_ecpci
+    - *install_ccache
+    - .gitlab/config/kokkos.sh "$CI_PROJECT_DIR/deps" "$KOKKOS_VER" $KOKKOS_OPTS
+
+build:crusher-kokkos-hip:
+  extends:
+    - .crusher-common
+    - .build-common
+    - .kokkos-hip-common
+  before_script:
+    - *setup_env_ecpci
+  needs:
+    - setup:crusher-kokkos-hip
+  dependencies:
+    - setup:crusher-kokkos-hip
+
+.cray-common:
+  variables:
+    # Order matters
+    JOB_MODULES: >-
+      PrgEnv-cray
+      cmake
+      git
+      ninja
+      libffi
+      zstd
+      DefApps
+  extends:
+    - .crusher-common
+
+setup:crusher-cray:
+  extends:
+    - .setup-common
+    - .cray-common
+
+build:crusher-cray:
+  extends:
+    - .build-common
+    - .cray-common
+  needs:
+    - setup:crusher-cray
+  dependencies:
+    - setup:crusher-cray
+
+.report-status:
+  tags: [ shell ]
+  rules:
+    - if: $CI_PIPELINE_SOURCE == "schedule"
+      when: never
+    - if: $CI_PIPELINE_SOURCE == "push"
+      when: never
+    - when: on_success
+  variables:
+    STATUS_PROJECT: ornladios/ADIOS2
+    STATUS_NAME: OLCF Crusher (Frontier)
+  script: >
+    curl -X POST -H @${GITHUB_CURL_HEADERS}
+    "https://api.github.com/repos/${STATUS_PROJECT}/statuses/${CI_COMMIT_SHA}"
+    -d "{\"state\":\"${CI_JOB_NAME}\", \"context\":\"${STATUS_NAME}\",\"target_url\":\"${CI_PIPELINE_URL}\",\"description\":\"${STATUS_DESC}\"}"
+  environment:
+    name: reporting-github
+
+pending:
+  stage: pre
+  variables:
+    STATUS_DESC: Pipeline is running
+  extends:
+    - .report-status
+success:
+  stage: post
+  variables:
+    STATUS_DESC: Pipeline succeeded
+  extends:
+    - .report-status
+  rules:
+    - if: $CI_PIPELINE_SOURCE == "schedule"
+      when: never
+    - if: $CI_PIPELINE_SOURCE == "push"
+      when: never
+    - when: on_success
+  dependencies:
+    - build:crusher-kokkos-hip
+    - build:crusher-cray
+failure:
+  stage: post
+  variables:
+    STATUS_DESC: Pipeline failed
+  extends:
+    - .report-status
+  rules:
+    - if: $CI_PIPELINE_SOURCE == "schedule"
+      when: never
+    - if: $CI_PIPELINE_SOURCE == "push"
+      when: never
+    - when: on_failure
+  dependencies:
+    - build:crusher-kokkos-hip
+    - build:crusher-cray
+
+generate_pipelines:
+  stage: setup
+  tags: [ shell ]
+  only:
+    - schedules
+  variables:
+    CUSTOM_CI_BUILDS_DIR: "/lustre/orion/csc303/scratch/vbolea/ci/adios2/runtime"
+  script:
+    - .gitlab/config/generate_pipelines.py -u "https://code.olcf.ornl.gov/" -p 78 -f .gitlab/config/dynamic_pipeline.yml.in > generated_pipelines.yml
+  artifacts:
+    paths:
+      - generated_pipelines.yml
+
+launch_pipelines:
+  stage: build
+  only:
+    - schedules
+  variables:
+    CUSTOM_CI_BUILDS_DIR: "/lustre/orion/csc303/scratch/vbolea/ci/adios2/runtime"
+  trigger:
+    include:
+      - artifact: generated_pipelines.yml
+        job: generate_pipelines
diff --git a/flake8.cfg b/flake8.cfg
index 5525546057..39b0361140 100644
--- a/flake8.cfg
+++ b/flake8.cfg
@@ -3,4 +3,4 @@ max-line-length = 80
 max-complexity = 1000
 format = pylint
 ignore = E302,F401,F403,F405,F999,W504
-exclude = thirdparty/
+exclude = thirdparty/,.gitlab/config/SpackCIBridge.py
diff --git a/scripts/ci/cmake-v2/ci-crusher-cray.cmake b/scripts/ci/cmake-v2/ci-crusher-cray.cmake
new file mode 100644
index 0000000000..a148430830
--- /dev/null
+++ b/scripts/ci/cmake-v2/ci-crusher-cray.cmake
@@ -0,0 +1,32 @@
+# Client maintainer: vicente.bolea@kitware.com
+
+set(ENV{CC} craycc)
+set(ENV{CXX} craycxx)
+
+set(dashboard_cache "
+ADIOS2_USE_BZip2:BOOL=OFF
+ADIOS2_USE_DataMan:BOOL=ON
+ADIOS2_USE_Fortran:BOOL=OFF
+ADIOS2_USE_MPI:BOOL=OFF
+ADIOS2_USE_HDF5:BOOL=OFF
+ADIOS2_USE_PNG:BOOL=OFF
+ADIOS2_USE_Python:BOOL=OFF
+ADIOS2_USE_SST:BOOL=ON
+ADIOS2_USE_ZeroMQ:STRING=OFF
+ADIOS2_USE_ZFP:BOOL=OFF
+ADIOS2_USE_SZ:BOOL=OFF
+ADIOS2_USE_Blosc:BOOL=OFF
+
+CMAKE_C_COMPILER_LAUNCHER=ccache
+CMAKE_CXX_COMPILER_LAUNCHER=ccache
+CMAKE_DISABLE_FIND_PACKAGE_BISON=ON
+CMAKE_DISABLE_FIND_PACKAGE_FLEX=ON
+")
+
+set(CTEST_TEST_ARGS
+  PARALLEL_LEVEL 8
+  EXCLUDE "Engine.Staging.TestThreads.*"
+  )
+set(CTEST_CMAKE_GENERATOR "Ninja")
+list(APPEND CTEST_UPDATE_NOTES_FILES "${CMAKE_CURRENT_LIST_FILE}")
+include(${CMAKE_CURRENT_LIST_DIR}/ci-common.cmake)
diff --git a/scripts/ci/cmake-v2/ci-crusher-kokkos-hip.cmake b/scripts/ci/cmake-v2/ci-crusher-kokkos-hip.cmake
new file mode 100644
index 0000000000..afe52c92e6
--- /dev/null
+++ b/scripts/ci/cmake-v2/ci-crusher-kokkos-hip.cmake
@@ -0,0 +1,31 @@
+# Client maintainer: vicente.bolea@kitware.com
+
+set(ENV{CC} gcc)
+set(ENV{CXX} g++)
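+# (Assumption based on the CI config above: Kokkos itself is built with hipcc in
+# the setup job, while ADIOS2 here compiles with the GNU toolchain and locates
+# the installed Kokkos through CMAKE_PREFIX_PATH.)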
+set(ENV{FC} gfortran)
+
+set(dashboard_cache "
+ADIOS2_USE_BZip2:BOOL=OFF
+ADIOS2_USE_DataMan:BOOL=ON
+ADIOS2_USE_Fortran:BOOL=OFF
+ADIOS2_USE_MPI:BOOL=OFF
+ADIOS2_USE_HDF5:BOOL=OFF
+ADIOS2_USE_PNG:BOOL=OFF
+ADIOS2_USE_Python:BOOL=OFF
+ADIOS2_USE_SST:BOOL=ON
+ADIOS2_USE_Kokkos=ON
+
+CMAKE_C_COMPILER_LAUNCHER=ccache
+CMAKE_CXX_COMPILER_LAUNCHER=ccache
+CMAKE_CUDA_COMPILER_LAUNCHER=ccache
+CMAKE_DISABLE_FIND_PACKAGE_BISON=ON
+CMAKE_DISABLE_FIND_PACKAGE_FLEX=ON
+")
+
+set(CTEST_TEST_ARGS
+  PARALLEL_LEVEL 8
+  EXCLUDE "Engine.Staging.TestThreads.*"
+  )
+set(CTEST_CMAKE_GENERATOR "Ninja")
+list(APPEND CTEST_UPDATE_NOTES_FILES "${CMAKE_CURRENT_LIST_FILE}")
+include(${CMAKE_CURRENT_LIST_DIR}/ci-common.cmake)
diff --git a/scripts/ci/gitlab-ci/run.sh b/scripts/ci/gitlab-ci/run.sh
index 02ee8c630f..0496b7d14a 100755
--- a/scripts/ci/gitlab-ci/run.sh
+++ b/scripts/ci/gitlab-ci/run.sh
@@ -1,13 +1,19 @@
 #!/bin/bash --login
 set -e
 
+# Strip the job name prefix
+export CI_JOB_NAME="${CI_JOB_NAME#*:}"
 export CI_BUILD_NAME="${CI_COMMIT_BRANCH#github/}_${CI_JOB_NAME}"
 export CI_COMMIT_REF=${CI_COMMIT_SHA}
 export CI_ROOT_DIR="${CI_PROJECT_DIR}/.."
 export CI_SITE_NAME="${GITLAB_SITE}"
 export CI_SOURCE_DIR="${CI_PROJECT_DIR}"
-export CI_BIN_DIR="${CI_ROOT_DIR}/${CI_BUILD_NAME}"
+# On OLCF Crusher the build directory must be set in the YAML file.
+if [ -z "$CI_BIN_DIR" ]
+then
+  export CI_BIN_DIR="${CI_ROOT_DIR}/${CI_BUILD_NAME}"
+fi
 
 readonly CTEST_SCRIPT=scripts/ci/cmake-v2/ci-${CI_JOB_NAME}.cmake
 if [ ! -f "$CTEST_SCRIPT" ]
@@ -36,10 +42,11 @@ fi
 declare -a CTEST_STEP_ARGS=("-Ddashboard_full=OFF")
 
 case ${STEP} in
-  update) CTEST_STEP_ARGS+=("${UPDATE_ARGS}") ;;
-  build) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;;
-  test) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;;
-  submit) CTEST_STEP_ARGS+=("-Ddashboard_do_submit_only=ON" "-Ddashboard_do_build=ON" "-Ddashboard_do_test=ON") ;;
+  update) CTEST_STEP_ARGS+=("${UPDATE_ARGS}") ;;
+  configure) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;;
+  build) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;;
+  test) CTEST_STEP_ARGS+=("-Ddashboard_do_submit=OFF") ;;
+  submit) CTEST_STEP_ARGS+=("-Ddashboard_do_submit_only=ON" "-Ddashboard_do_configure=ON" "-Ddashboard_do_build=ON" "-Ddashboard_do_test=ON") ;;
 esac
 
 CTEST_STEP_ARGS+=("-Ddashboard_do_${STEP}=ON")
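For reference, a minimal sketch of the template expansion performed by the scheduled `generate_pipelines` job: `generate_pipelines.py` renders `.gitlab/config/dynamic_pipeline.yml.in` once per recently updated branch, and the concatenated output becomes `generated_pipelines.yml`, which the `launch_pipelines` job triggers as child pipelines. The branch names below are hypothetical.

```python
# Sketch of the {branch} substitution done by generate_pipelines.py; the
# template text mirrors dynamic_pipeline.yml.in, the branch names are made up.
template = """child_pipeline_{branch}:
  variables:
    CUSTOM_CI_BUILDS_DIR: "/lustre/orion/csc303/scratch/vbolea/ci/adios2/runtime"
  trigger:
    include:
      - project: 'ci/csc303_crusher/dev/adios2'
        ref: '{branch}'
        file: '.gitlab/gitlab-ci-crusher.yml'
"""

for branch in ["master", "release_210"]:  # hypothetical branch names
    print(template.format(branch=branch))
```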