diff --git a/eng/pipelines/coreclr/superpmi-asmdiffs-checked-release.yml b/eng/pipelines/coreclr/superpmi-asmdiffs-checked-release.yml
new file mode 100644
index 00000000000000..b1b4bea9d20928
--- /dev/null
+++ b/eng/pipelines/coreclr/superpmi-asmdiffs-checked-release.yml
@@ -0,0 +1,41 @@
+trigger: none
+
+schedules:
+- cron: "0 10 * * 6,0"
+ displayName: Sat and Sun at 2:00 AM (UTC-8:00)
+ branches:
+ include:
+ - main
+ always: true
+
+jobs:
+
+- template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/build-jit-job.yml
+ buildConfig: checked
+ platforms:
+ - windows_x64
+ - windows_x86
+ jobParameters:
+ uploadAs: 'pipelineArtifacts'
+
+- template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/build-jit-job.yml
+ buildConfig: release
+ platforms:
+ - windows_x64
+ - windows_x86
+ jobParameters:
+ uploadAs: 'pipelineArtifacts'
+
+- template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/superpmi-asmdiffs-checked-release-job.yml
+ buildConfig: checked
+ platforms:
+ - windows_x64
+ - windows_x86
+ helixQueueGroup: ci
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
diff --git a/eng/pipelines/coreclr/templates/run-superpmi-asmdiffs-checked-release-job.yml b/eng/pipelines/coreclr/templates/run-superpmi-asmdiffs-checked-release-job.yml
new file mode 100644
index 00000000000000..56975233885a77
--- /dev/null
+++ b/eng/pipelines/coreclr/templates/run-superpmi-asmdiffs-checked-release-job.yml
@@ -0,0 +1,110 @@
+parameters:
+ steps: [] # optional -- any additional steps that need to happen before pulling down the jitutils repo and sending the jitutils to helix (i.e., building your repo)
+ variables: [] # optional -- list of additional variables to send to the template
+ jobName: '' # required -- job name
+ displayName: '' # optional -- display name for the job. Will use jobName if not passed
+ pool: '' # required -- name of the Build pool
+ container: '' # required -- name of the container
+ buildConfig: '' # required -- build configuration
+ archType: '' # required -- targeting CPU architecture
+ osGroup: '' # required -- operating system for the job
+ osSubgroup: '' # optional -- operating system subgroup
+ continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
+ dependsOn: '' # optional -- dependencies of the job
+ timeoutInMinutes: 320 # optional -- timeout for the job
+ enableTelemetry: false # optional -- enable telemetry
+ liveLibrariesBuildConfig: '' # optional -- live libraries build configuration to use for the run
+ helixQueues: '' # required -- Helix queues
+ dependOnEvaluatePaths: false
+
+jobs:
+- template: xplat-pipeline-job.yml
+ parameters:
+ dependsOn: ${{ parameters.dependsOn }}
+ buildConfig: ${{ parameters.buildConfig }}
+ archType: ${{ parameters.archType }}
+ osGroup: ${{ parameters.osGroup }}
+ osSubgroup: ${{ parameters.osSubgroup }}
+ liveLibrariesBuildConfig: ${{ parameters.liveLibrariesBuildConfig }}
+ enableTelemetry: ${{ parameters.enableTelemetry }}
+ enablePublishBuildArtifacts: true
+ continueOnError: ${{ parameters.continueOnError }}
+ dependOnEvaluatePaths: ${{ parameters.dependOnEvaluatePaths }}
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: '${{ parameters.displayName }}'
+ ${{ if eq(parameters.displayName, '') }}:
+ displayName: '${{ parameters.jobName }}'
+
+ variables:
+
+ - name: PythonScript
+ value: 'py -3'
+ - name: PipScript
+ value: 'py -3 -m pip'
+ - name: SpmiCollectionLocation
+ value: '$(Build.SourcesDirectory)\artifacts\spmi\'
+ - name: SpmiLogsLocation
+ value: '$(Build.SourcesDirectory)\artifacts\spmi_logs\'
+ - name: HelixResultLocation
+ value: '$(Build.SourcesDirectory)\artifacts\helixresults\'
+
+ - ${{ each variable in parameters.variables }}:
+ - ${{insert}}: ${{ variable }}
+
+ workspace:
+ clean: all
+ pool:
+ ${{ parameters.pool }}
+ container: ${{ parameters.container }}
+ steps:
+ - ${{ parameters.steps }}
+
+ - script: |
+ mkdir -p $(SpmiCollectionLocation)
+ displayName: Create directory for SPMI collection
+
+ - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_asmdiffs_checked_release_setup.py -source_directory $(Build.SourcesDirectory) -checked_directory $(buildProductRootFolderPath) -release_directory $(releaseProductRootFolderPath) -arch $(archType)
+ displayName: ${{ format('SuperPMI asmdiffs checked release setup ({0} {1})', parameters.osGroup, parameters.archType) }}
+
+ # Run superpmi asmdiffs between the checked and release builds on Helix
+ - template: /eng/pipelines/common/templates/runtimes/send-to-helix-step.yml
+ parameters:
+ displayName: 'Send job to Helix'
+ helixBuild: $(Build.BuildNumber)
+ helixSource: $(_HelixSource)
+ helixType: 'build/tests/'
+ helixQueues: ${{ join(',', parameters.helixQueues) }}
+ creator: dotnet-bot
+ WorkItemTimeout: 4:00 # 4 hours
+ WorkItemDirectory: '$(WorkItemDirectory)'
+ CorrelationPayloadDirectory: '$(CorrelationPayloadDirectory)'
+ helixProjectArguments: '$(Build.SourcesDirectory)/src/coreclr/scripts/superpmi-asmdiffs-checked-release.proj'
+ BuildConfig: ${{ parameters.buildConfig }}
+ osGroup: ${{ parameters.osGroup }}
+ archType: ${{ parameters.archType }}
+ shouldContinueOnError: true # Run the next step (upload SuperPMI logs) even on failure
+
+ # Always upload the available logs for diagnostics
+ - task: CopyFiles@2
+ displayName: Copy superpmi_*.log files from all partitions
+ inputs:
+ sourceFolder: '$(HelixResultLocation)'
+ contents: '**/superpmi_*.log'
+ targetFolder: '$(SpmiLogsLocation)'
+ condition: always()
+
+ - task: PublishPipelineArtifact@1
+ displayName: Publish SuperPMI logs
+ inputs:
+ targetPath: $(SpmiLogsLocation)
+ artifactName: 'SuperPMI_Logs_$(archType)_$(buildConfig)'
+ condition: always()
+
+ - task: PublishPipelineArtifact@1
+ displayName: Publish SuperPMI build logs
+ inputs:
+ targetPath: $(Build.SourcesDirectory)/artifacts/log
+ artifactName: 'SuperPMI_BuildLogs_$(archType)_$(buildConfig)'
+ condition: always()
diff --git a/eng/pipelines/coreclr/templates/superpmi-asmdiffs-checked-release-job.yml b/eng/pipelines/coreclr/templates/superpmi-asmdiffs-checked-release-job.yml
new file mode 100644
index 00000000000000..c19ed54a2508d8
--- /dev/null
+++ b/eng/pipelines/coreclr/templates/superpmi-asmdiffs-checked-release-job.yml
@@ -0,0 +1,60 @@
+parameters:
+ buildConfig: '' # required -- build configuration
+ archType: '' # required -- targeting CPU architecture
+ osGroup: '' # required -- operating system for the job
+ osSubgroup: '' # optional -- operating system subgroup
+ pool: ''
+ timeoutInMinutes: 320 # build timeout
+ variables: {}
+ helixQueues: ''
+ dependOnEvaluatePaths: false
+ runJobTemplate: '/eng/pipelines/coreclr/templates/run-superpmi-asmdiffs-checked-release-job.yml'
+
+jobs:
+- template: ${{ parameters.runJobTemplate }}
+ parameters:
+ jobName: ${{ format('superpmi_asmdiffs_checked_release_{0}{1}_{2}', parameters.osGroup, parameters.osSubgroup, parameters.archType) }}
+ displayName: ${{ format('SuperPMI asmdiffs checked release {0}{1} {2}', parameters.osGroup, parameters.osSubgroup, parameters.archType) }}
+ pool: ${{ parameters.pool }}
+ buildConfig: ${{ parameters.buildConfig }}
+ archType: ${{ parameters.archType }}
+ osGroup: ${{ parameters.osGroup }}
+ osSubgroup: ${{ parameters.osSubgroup }}
+ dependOnEvaluatePaths: ${{ parameters.dependOnEvaluatePaths }}
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+ helixQueues: ${{ parameters.helixQueues }}
+ dependsOn:
+ - ${{ format('coreclr_jit_build_{0}{1}_{2}_{3}', parameters.osGroup, parameters.osSubgroup, parameters.archType, 'checked') }}
+ - ${{ format('coreclr_jit_build_{0}{1}_{2}_{3}', parameters.osGroup, parameters.osSubgroup, parameters.archType, 'release') }}
+
+ variables:
+
+ - ${{ each variable in parameters.variables }}:
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ - name: releaseProductRootFolderPath
+ value: '$(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).Release'
+ - name: releaseProductArtifactName
+ value: 'CoreCLRProduct_${{ parameters.pgoType }}_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_release'
+
+ steps:
+
+ # Download jit checked builds
+ - template: /eng/pipelines/common/download-artifact-step.yml
+ parameters:
+ unpackFolder: $(buildProductRootFolderPath)
+ artifactFileName: '$(buildProductArtifactName)$(archiveExtension)'
+ artifactName: '$(buildProductArtifactName)'
+ displayName: 'JIT checked build'
+
+ # Download jit release builds
+ - template: /eng/pipelines/common/download-artifact-step.yml
+ parameters:
+ unpackFolder: $(releaseProductRootFolderPath)
+ artifactFileName: '$(releaseProductArtifactName)$(archiveExtension)'
+ artifactName: '$(releaseProductArtifactName)'
+ displayName: 'JIT release build'
diff --git a/src/coreclr/scripts/superpmi-asmdiffs-checked-release.proj b/src/coreclr/scripts/superpmi-asmdiffs-checked-release.proj
new file mode 100644
index 00000000000000..4600858f22e980
--- /dev/null
+++ b/src/coreclr/scripts/superpmi-asmdiffs-checked-release.proj
@@ -0,0 +1,73 @@
+<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
+
+  <PropertyGroup Condition="'$(AGENT_OS)' == 'Windows_NT'">
+    <Python>%HELIX_PYTHONPATH%</Python>
+    <ProductDirectory>%HELIX_CORRELATION_PAYLOAD%</ProductDirectory>
+    <SuperpmiLogsLocation>%HELIX_WORKITEM_UPLOAD_ROOT%</SuperpmiLogsLocation>
+
+    <HelixResultsDestinationDir>$(BUILD_SOURCESDIRECTORY)\artifacts\helixresults</HelixResultsDestinationDir>
+    <WorkItemCommand>$(Python) $(ProductDirectory)\superpmi_asmdiffs_checked_release.py --diff_with_release -base_jit_directory $(ProductDirectory)\base -diff_jit_directory $(ProductDirectory)\diff -log_directory $(SuperpmiLogsLocation)</WorkItemCommand>
+    <WorkItemTimeout>1:00</WorkItemTimeout>
+  </PropertyGroup>
+
+  <PropertyGroup>
+    <EnableAzurePipelinesReporter>false</EnableAzurePipelinesReporter>
+    <EnableXUnitReporter>false</EnableXUnitReporter>
+    <Creator>$(_Creator)</Creator>
+    <HelixAccessToken>$(_HelixAccessToken)</HelixAccessToken>
+    <HelixBuild>$(_HelixBuild)</HelixBuild>
+    <HelixSource>$(_HelixSource)</HelixSource>
+    <HelixTargetQueues>$(_HelixTargetQueues)</HelixTargetQueues>
+    <HelixType>$(_HelixType)</HelixType>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
+      <PayloadDirectory>%(Identity)</PayloadDirectory>
+    </HelixCorrelationPayload>
+  </ItemGroup>
+
+  <ItemGroup>
+    <!-- One work item per target platform/architecture; each HelixWorkItem carries
+         Platform and Architecture metadata consumed by the command below. -->
+    <HelixWorkItem Include="superpmi_asmdiffs_checked_release_windows_$(Architecture)">
+      <Platform>windows</Platform>
+      <Architecture>$(Architecture)</Architecture>
+      <Command>$(WorkItemCommand) -arch %(HelixWorkItem.Architecture) -platform %(HelixWorkItem.Platform)</Command>
+      <Timeout>$(WorkItemTimeout)</Timeout>
+      <DownloadFilesFromResults>superpmi_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log;superpmi_download_%(HelixWorkItem.Platform)_%(HelixWorkItem.Architecture).log</DownloadFilesFromResults>
+    </HelixWorkItem>
+  </ItemGroup>
+
+</Project>
diff --git a/src/coreclr/scripts/superpmi.py b/src/coreclr/scripts/superpmi.py
old mode 100755
new mode 100644
index 3c9d417f81cc02..1034a7aacf9dda
--- a/src/coreclr/scripts/superpmi.py
+++ b/src/coreclr/scripts/superpmi.py
@@ -325,6 +325,7 @@
asm_diff_parser.add_argument("-tag", help="Specify a word to add to the directory name where the asm diffs will be placed")
asm_diff_parser.add_argument("-metrics", action="append", help="Metrics option to pass to jit-analyze. Can be specified multiple times, or pass comma-separated values.")
asm_diff_parser.add_argument("-retainOnlyTopFiles", action="store_true", help="Retain only top .dasm files with largest improvements or regressions and delete remaining files.")
+asm_diff_parser.add_argument("--diff_with_release", action="store_true", help="Specify whether this is an asmdiffs run against release binaries.")
# subparser for upload
upload_parser = subparsers.add_parser("upload", description=upload_description, parents=[core_root_parser, target_parser])
@@ -1492,6 +1493,7 @@ def replay_with_asm_diffs(self):
with ChangeDir(self.coreclr_args.core_root):
command = [self.superpmi_path] + flags + [self.base_jit_path, self.diff_jit_path, mch_file]
return_code = run_and_log(command)
+ logging.debug("return_code: %s", return_code)
base_metrics = read_csv_metrics(base_metrics_summary_file)
diff_metrics = read_csv_metrics(diff_metrics_summary_file)
@@ -1501,33 +1503,32 @@ def replay_with_asm_diffs(self):
if return_code != 0:
- # Don't report as replay failure asm diffs (return code 2) or missing data (return code 3).
+ # Don't report asm diffs (return code 2) as a replay failure unless we are diffing against a Release build, and don't report missing data (return code 3) as a failure.
# Anything else, such as compilation failure (return code 1, typically a JIT assert) will be
# reported as a replay failure.
- if return_code != 2 and return_code != 3:
+ if (return_code != 2 or self.coreclr_args.diff_with_release) and return_code != 3:
result = False
files_with_replay_failures.append(mch_file)
if is_nonzero_length_file(fail_mcl_file):
# Unclean replay. Examine the contents of the fail.mcl file to dig into failures.
- if return_code == 0:
- logging.warning("Warning: SuperPMI returned a zero exit code, but generated a non-zero-sized mcl file")
print_fail_mcl_file_method_numbers(fail_mcl_file)
repro_base_command_line = "{} {} {}".format(self.superpmi_path, " ".join(altjit_asm_diffs_flags), self.diff_jit_path)
save_repro_mc_files(temp_location, self.coreclr_args, artifacts_base_name, repro_base_command_line)
+ # This file had asm diffs; keep track of that.
+ if is_nonzero_length_file(diff_mcl_file):
+ files_with_asm_diffs.append(mch_file)
+
# There were diffs. Go through each method that created diffs and
# create a base/diff asm file with diffable asm. In addition, create
# a standalone .mc for easy iteration.
- if is_nonzero_length_file(diff_mcl_file):
+ if is_nonzero_length_file(diff_mcl_file) and not self.coreclr_args.diff_with_release:
# AsmDiffs. Save the contents of the fail.mcl file to dig into failures.
if return_code == 0:
logging.warning("Warning: SuperPMI returned a zero exit code, but generated a non-zero-sized mcl file")
- # This file had asm diffs; keep track of that.
- files_with_asm_diffs.append(mch_file)
-
self.diff_mcl_contents = None
with open(diff_mcl_file) as file_handle:
mcl_lines = file_handle.readlines()
@@ -1711,7 +1712,7 @@ async def create_one_artifact(jit_path: str, location: str, flags) -> str:
# Construct an overall Markdown summary file.
- if len(all_md_summary_files) > 0:
+ if len(all_md_summary_files) > 0 and not self.coreclr_args.diff_with_release:
overall_md_summary_file = create_unique_file_name(self.coreclr_args.spmi_location, "diff_summary", "md")
if not os.path.isdir(self.coreclr_args.spmi_location):
os.makedirs(self.coreclr_args.spmi_location)
@@ -3310,6 +3311,11 @@ def verify_replay_common_args():
lambda unused: True,
"Unable to set retainOnlyTopFiles.")
+ coreclr_args.verify(args,
+ "diff_with_release",
+ lambda unused: True,
+ "Unable to set diff_with_release.")
+
process_base_jit_path_arg(coreclr_args)
jit_in_product_location = False
@@ -3555,6 +3561,8 @@ def main(args):
base_jit_path = coreclr_args.base_jit_path
diff_jit_path = coreclr_args.diff_jit_path
+ if coreclr_args.diff_with_release:
+ logging.info("Diff between Checked and Release.")
logging.info("Base JIT Path: %s", base_jit_path)
logging.info("Diff JIT Path: %s", diff_jit_path)
diff --git a/src/coreclr/scripts/superpmi_asmdiffs_checked_release.py b/src/coreclr/scripts/superpmi_asmdiffs_checked_release.py
new file mode 100644
index 00000000000000..72d4784a1f3068
--- /dev/null
+++ b/src/coreclr/scripts/superpmi_asmdiffs_checked_release.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python3
+#
+# Licensed to the .NET Foundation under one or more agreements.
+# The .NET Foundation licenses this file to you under the MIT license.
+#
+# Title : superpmi_asmdiffs_checked_release.py
+#
+# Notes:
+#
+# Script to run "superpmi asmdiffs" between checked and release JIT binaries
+# for various collections on the Helix machines.
+#
+################################################################################
+################################################################################
+
+import argparse
+import os
+import shutil
+from coreclr_arguments import *
+from jitutil import run_command
+
+parser = argparse.ArgumentParser(description="Run SuperPMI asmdiffs between checked and release JIT binaries on Helix machines")
+
+parser.add_argument("-arch", help="Architecture")
+parser.add_argument("-platform", help="OS platform")
+parser.add_argument("--diff_with_release", action="store_true", help="asmdiffs between JIT Checked binaries and Release binaries")
+parser.add_argument("-base_jit_directory", help="path to the directory containing base clrjit binaries")
+parser.add_argument("-diff_jit_directory", help="path to the directory containing diff clrjit binaries")
+parser.add_argument("-log_directory", help="path to the directory containing superpmi log files")
+
+def setup_args(args):
+ """ Setup the args for SuperPMI to use.
+
+ Args:
+ args (ArgParse): args parsed by arg parser
+
+ Returns:
+ args (CoreclrArguments)
+
+ """
+ coreclr_args = CoreclrArguments(args, require_built_core_root=False, require_built_product_dir=False,
+ require_built_test_dir=False, default_build_type="Checked")
+
+ coreclr_args.verify(args,
+ "arch",
+ lambda unused: True,
+ "Unable to set arch")
+
+ coreclr_args.verify(args,
+ "platform",
+ lambda unused: True,
+ "Unable to set platform")
+
+ coreclr_args.verify(args,
+ "diff_with_release",
+ lambda unused: True,
+ "Unable to set diff_with_release")
+
+ coreclr_args.verify(args,
+ "base_jit_directory",
+ lambda jit_directory: os.path.isdir(jit_directory),
+ "base_jit_directory doesn't exist")
+
+ coreclr_args.verify(args,
+ "diff_jit_directory",
+ lambda jit_directory: os.path.isdir(jit_directory),
+ "diff_jit_directory doesn't exist")
+
+ coreclr_args.verify(args,
+ "log_directory",
+ lambda log_directory: True,
+ "log_directory doesn't exist")
+
+ return coreclr_args
+
+
+def main(main_args):
+ """ Run superpmi asmdiffs process on the Helix machines.
+
+ See superpmi_asmdiffs_checked_release_setup.py for how the directory structure is set up
+ in the correlation payload. This script lives in the root of that directory tree.
+
+ Args:
+ main_args ([type]): Arguments to the script
+ """
+
+ python_path = sys.executable
+ script_dir = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
+ coreclr_args = setup_args(main_args)
+
+ # It doesn't really matter where we put the downloaded SPMI artifacts.
+ # Here, they are put in the "artifacts/spmi" subdirectory next to this script.
+ spmi_location = os.path.join(script_dir, "artifacts", "spmi")
+
+ log_directory = coreclr_args.log_directory
+ platform_name = coreclr_args.platform
+
+ # Figure out which JITs to use
+ os_name = "win" if platform_name.lower() == "windows" else "unix"
+ arch_name = coreclr_args.arch
+ host_arch_name = "x64" if arch_name.endswith("64") else "x86"
+ os_name = "universal" if arch_name.startswith("arm") else os_name
+ base_jit_path = os.path.join(coreclr_args.base_jit_directory, 'clrjit_{}_{}_{}.dll'.format(os_name, arch_name, host_arch_name))
+ diff_jit_path = os.path.join(coreclr_args.diff_jit_directory, 'clrjit_{}_{}_{}.dll'.format(os_name, arch_name, host_arch_name))
+
+ # Core_Root is where the superpmi tools (superpmi.exe, mcs.exe) are expected to be found.
+ # We pass the full path of the JITs to use as arguments.
+ core_root_dir = script_dir
+
+ print("Running superpmi.py download to get MCH files")
+
+ log_file = os.path.join(log_directory, "superpmi_download_{}_{}.log".format(platform_name, arch_name))
+ run_command([
+ python_path,
+ os.path.join(script_dir, "superpmi.py"),
+ "download",
+ "--no_progress",
+ "-core_root", core_root_dir,
+ "-target_os", platform_name,
+ "-target_arch", arch_name,
+ "-spmi_location", spmi_location,
+ "-log_level", "debug",
+ "-log_file", log_file
+ ], _exit_on_fail=True)
+
+ print("Running superpmi.py asmdiffs between checked and release binaries")
+ log_file = os.path.join(log_directory, "superpmi_{}_{}.log".format(platform_name, arch_name))
+
+ _, _, return_code = run_command([
+ python_path,
+ os.path.join(script_dir, "superpmi.py"),
+ "asmdiffs",
+ "--diff_with_release",
+ "--no_progress",
+ "-core_root", core_root_dir,
+ "-target_os", platform_name,
+ "-target_arch", arch_name,
+ "-arch", host_arch_name,
+ "-base_jit_path", base_jit_path,
+ "-diff_jit_path", diff_jit_path,
+ "-spmi_location", spmi_location,
+ "-error_limit", "100",
+ "-log_level", "debug",
+ "-log_file", log_file])
+
+
+ # TODO: the superpmi.py asmdiffs command returns a failure code if there are MISSING data even if there are
+ # no asm diffs. We should probably only fail if there are actual failures (not MISSING or asm diffs).
+
+ if return_code != 0:
+ print("Failure in {}".format(log_file))
+ return 1
+
+ return 0
+
+
+if __name__ == "__main__":
+ args = parser.parse_args()
+ sys.exit(main(args))
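(A minimal sketch, not part of the patch: the per-target JIT file name constructed above follows the clrjit_&lt;os&gt;_&lt;arch&gt;_&lt;host arch&gt;.dll convention. The illustrative function below restates that derivation outside the script and can be run standalone.)

# Illustrative restatement (not part of the script) of the JIT file-name derivation used above.
def jit_file_name(platform_name, arch_name):
    os_name = "win" if platform_name.lower() == "windows" else "unix"
    os_name = "universal" if arch_name.startswith("arm") else os_name
    host_arch_name = "x64" if arch_name.endswith("64") else "x86"
    return "clrjit_{}_{}_{}.dll".format(os_name, arch_name, host_arch_name)

assert jit_file_name("windows", "x64") == "clrjit_win_x64_x64.dll"
assert jit_file_name("windows", "x86") == "clrjit_win_x86_x86.dll"
assert jit_file_name("windows", "arm64") == "clrjit_universal_arm64_x64.dll"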
diff --git a/src/coreclr/scripts/superpmi_asmdiffs_checked_release_setup.py b/src/coreclr/scripts/superpmi_asmdiffs_checked_release_setup.py
new file mode 100644
index 00000000000000..c98e83c87a843c
--- /dev/null
+++ b/src/coreclr/scripts/superpmi_asmdiffs_checked_release_setup.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python3
+#
+# Licensed to the .NET Foundation under one or more agreements.
+# The .NET Foundation licenses this file to you under the MIT license.
+#
+# Title : superpmi_asmdiffs_checked_release_setup.py
+#
+# Notes:
+#
+# Script to set up the directory structure required to run SuperPMI asmdiffs between checked and release JITs in CI.
+# It creates `correlation_payload_directory` with `base` and `diff` directories
+# that contain clrjit*.dll.
+#
+################################################################################
+################################################################################
+
+import argparse
+import logging
+import os
+
+from coreclr_arguments import *
+from jitutil import copy_directory, set_pipeline_variable, run_command, TempDir, download_files
+
+parser = argparse.ArgumentParser(description="Set up the Helix payload directory structure for SuperPMI asmdiffs between checked and release JITs")
+
+parser.add_argument("-arch", help="Architecture")
+parser.add_argument("-source_directory", help="path to the directory of the dotnet/runtime source tree")
+parser.add_argument("-checked_directory", help="path to the directory containing checked binaries (e.g., /artifacts/bin/coreclr/windows.x64.Checked)")
+parser.add_argument("-release_directory", help="path to the directory containing release binaries (e.g., /artifacts/bin/coreclr/windows.x64.Release)")
+
+is_windows = platform.system() == "Windows"
+
+
+def setup_args(args):
+ """ Setup the args for SuperPMI to use.
+
+ Args:
+ args (ArgParse): args parsed by arg parser
+
+ Returns:
+ args (CoreclrArguments)
+
+ """
+ coreclr_args = CoreclrArguments(args, require_built_core_root=False, require_built_product_dir=False,
+ require_built_test_dir=False, default_build_type="Checked")
+
+ coreclr_args.verify(args,
+ "arch",
+ lambda unused: True,
+ "Unable to set arch")
+
+ coreclr_args.verify(args,
+ "source_directory",
+ lambda source_directory: os.path.isdir(source_directory),
+ "source_directory doesn't exist")
+
+ coreclr_args.verify(args,
+ "checked_directory",
+ lambda checked_directory: os.path.isdir(checked_directory),
+ "checked_directory doesn't exist")
+
+ coreclr_args.verify(args,
+ "release_directory",
+ lambda release_directory: os.path.isdir(release_directory),
+ "release_directory doesn't exist")
+
+ return coreclr_args
+
+
+def match_jit_files(full_path):
+ """ Match all the JIT files that we want to copy and use.
+ Note that we currently only match Windows files, and not osx cross-compile files.
+ We also don't copy the "default" clrjit.dll, since we always use the fully specified
+ JITs, e.g., clrjit_win_x64_x64.dll.
+ """
+ file_name = os.path.basename(full_path)
+
+ if file_name.startswith("clrjit_") and file_name.endswith(".dll") and file_name.find("osx") == -1:
+ return True
+
+ return False
+
+
+def match_superpmi_tool_files(full_path):
+ """ Match all the SuperPMI tool files that we want to copy and use.
+ Note that we currently only match Windows files.
+ """
+ file_name = os.path.basename(full_path)
+
+ if file_name == "superpmi.exe" or file_name == "mcs.exe":
+ return True
+
+ return False
+
+
+def main(main_args):
+ """Main entrypoint: Prepare the Helix data for SuperPMI asmdiffs checked release Azure DevOps pipeline.
+
+ The Helix correlation payload directory is created and populated as follows:
+
+ \payload -- the correlation payload directory
+ -- contains the *.py scripts from \src\coreclr\scripts
+ -- contains superpmi.exe, mcs.exe from the target-specific build
+ \payload\base
+ -- contains the Checked JITs
+ \payload\diff
+ -- contains the Release JITs
+
+ Then, AzDO pipeline variables are set.
+
+ Args:
+ main_args ([type]): Arguments to the script
+ """
+
+ # Set up logging.
+ logger = logging.getLogger()
+ logger.setLevel(logging.INFO)
+ stream_handler = logging.StreamHandler(sys.stdout)
+ stream_handler.setLevel(logging.INFO)
+ logger.addHandler(stream_handler)
+
+ coreclr_args = setup_args(main_args)
+
+ arch = coreclr_args.arch
+ source_directory = coreclr_args.source_directory
+ checked_directory = coreclr_args.checked_directory
+ release_directory = coreclr_args.release_directory
+
+ python_path = sys.executable
+
+ # CorrelationPayload directories
+ correlation_payload_directory = os.path.join(source_directory, "payload")
+ superpmi_scripts_directory = os.path.join(source_directory, 'src', 'coreclr', 'scripts')
+ base_jit_directory = os.path.join(correlation_payload_directory, "base")
+ diff_jit_directory = os.path.join(correlation_payload_directory, "diff")
+
+ ######## Copy SuperPMI python scripts
+
+ # Copy *.py to CorrelationPayload
+ print('Copying {} -> {}'.format(superpmi_scripts_directory, correlation_payload_directory))
+ copy_directory(superpmi_scripts_directory, correlation_payload_directory, verbose_copy=True,
+ match_func=lambda path: any(path.endswith(extension) for extension in [".py"]))
+
+ ######## Copy baseline Checked JIT
+
+ # Copy clrjit*_arch.dll binaries from checked_directory to base_jit_directory
+ print('Copying base Checked binaries {} -> {}'.format(checked_directory, base_jit_directory))
+ copy_directory(checked_directory, base_jit_directory, verbose_copy=True, match_func=match_jit_files)
+
+ ######## Copy diff Release JIT
+
+ # Copy clrjit*_arch.dll binaries from release_directory to diff_jit_directory
+ print('Copying diff Release binaries {} -> {}'.format(release_directory, diff_jit_directory))
+ copy_directory(release_directory, diff_jit_directory, verbose_copy=True, match_func=match_jit_files)
+
+ ######## Get SuperPMI tools
+
+ # Put the SuperPMI tools directly in the root of the correlation payload directory.
+ print('Copying SuperPMI tools {} -> {}'.format(checked_directory, correlation_payload_directory))
+ copy_directory(checked_directory, correlation_payload_directory, verbose_copy=True, match_func=match_superpmi_tool_files)
+
+
+ # Set variables
+
+ helix_source_prefix = "official"
+ creator = ""
+
+ print('Setting pipeline variables:')
+ set_pipeline_variable("CorrelationPayloadDirectory", correlation_payload_directory)
+ set_pipeline_variable("Architecture", arch)
+ set_pipeline_variable("Creator", creator)
+ set_pipeline_variable("HelixSourcePrefix", helix_source_prefix)
+
+
+if __name__ == "__main__":
+ args = parser.parse_args()
+ sys.exit(main(args))
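(A minimal sketch, not part of the patch: the two match functions above select the fully specified Windows JITs and the SuperPMI tools. The standalone restatement below, with hypothetical sample file names, shows which files pass each filter.)

import os

# Illustrative restatement (not part of the setup script) of the two file filters above.
def match_jit_files(full_path):
    file_name = os.path.basename(full_path)
    return file_name.startswith("clrjit_") and file_name.endswith(".dll") and file_name.find("osx") == -1

def match_superpmi_tool_files(full_path):
    return os.path.basename(full_path) in ("superpmi.exe", "mcs.exe")

# Sample file names are hypothetical, for illustration only.
assert match_jit_files("clrjit_win_x64_x64.dll")          # fully specified JIT: copied
assert not match_jit_files("clrjit.dll")                  # default JIT: skipped
assert not match_jit_files("clrjit_win_osx_x64.dll")      # any name containing "osx": skipped
assert match_superpmi_tool_files("superpmi.exe")
assert not match_superpmi_tool_files("coreclr.dll")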