From c18c9ee09cfb75d70b14dbb89918566e56686c35 Mon Sep 17 00:00:00 2001
From: "dotnet-maestro[bot]"
Date: Sun, 24 Nov 2024 13:23:27 +0000
Subject: [PATCH 01/15] Update dependencies from https://github.com/dotnet/arcade
 build 20241122.3

Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk
 From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24572.3

---
 eng/Version.Details.xml | 8 +-
 eng/common/SetupNugetSources.ps1 | 54 +--
 eng/common/SetupNugetSources.sh | 62 ++--
 eng/common/build.ps1 | 4 +-
 eng/common/build.sh | 9 -
 eng/common/core-templates/job/job.yml | 239 +++++++++++++
 eng/common/core-templates/job/onelocbuild.yml | 121 +++++++
 .../job/publish-build-assets.yml | 158 +++++++++
 .../core-templates/job/source-build.yml | 94 +++++
 .../job/source-index-stage1.yml | 44 +++
 .../core-templates/jobs/codeql-build.yml | 33 ++
 eng/common/core-templates/jobs/jobs.yml | 119 +++++++
 .../core-templates/jobs/source-build.yml | 58 +++
 .../post-build/common-variables.yml | 22 ++
 .../core-templates/post-build/post-build.yml | 316 +++++++++++++++++
 .../post-build/setup-maestro-vars.yml | 74 ++++
 .../steps/cleanup-microbuild.yml | 28 ++
 .../steps/component-governance.yml | 16 +
 .../steps/enable-internal-runtimes.yml | 32 ++
 .../steps/enable-internal-sources.yml | 47 +++
 .../core-templates/steps/generate-sbom.yml | 54 +++
 .../steps/get-delegation-sas.yml | 46 +++
 .../steps/get-federated-access-token.yml | 42 +++
 .../steps/install-microbuild.yml | 43 +++
 .../steps/publish-build-artifacts.yml | 20 ++
 .../core-templates/steps/publish-logs.yml | 61 ++++
 .../steps/publish-pipeline-artifacts.yml | 20 ++
 .../core-templates/steps/retain-build.yml | 28 ++
 .../core-templates/steps/send-to-helix.yml | 93 +++++
 .../core-templates/steps/source-build.yml | 133 +++++++
 .../steps/source-index-stage1-publish.yml | 35 ++
 .../variables/pool-providers.yml | 8 +
 eng/common/cross/arm/sources.list.bionic | 11 -
 eng/common/cross/arm/sources.list.focal | 11 -
 eng/common/cross/arm/sources.list.jammy | 11 -
 eng/common/cross/arm/sources.list.jessie | 3 -
 eng/common/cross/arm/sources.list.xenial | 11 -
 eng/common/cross/arm/sources.list.zesty | 11 -
 eng/common/cross/arm64/sources.list.bionic | 11 -
 eng/common/cross/arm64/sources.list.buster | 11 -
 eng/common/cross/arm64/sources.list.focal | 11 -
 eng/common/cross/arm64/sources.list.jammy | 11 -
 eng/common/cross/arm64/sources.list.stretch | 12 -
 eng/common/cross/arm64/sources.list.xenial | 11 -
 eng/common/cross/arm64/sources.list.zesty | 11 -
 eng/common/cross/armel/sources.list.jessie | 3 -
 eng/common/cross/armv6/sources.list.buster | 2 -
 eng/common/cross/build-android-rootfs.sh | 8 +-
 eng/common/cross/build-rootfs.sh | 281 +++++++++++----
 eng/common/cross/ppc64le/sources.list.bionic | 11 -
 eng/common/cross/riscv64/sources.list.sid | 1 -
 eng/common/cross/s390x/sources.list.bionic | 11 -
 eng/common/cross/tizen-fetch.sh | 2 +-
 eng/common/cross/toolchain.cmake | 20 ++
 eng/common/cross/x64/sources.list.bionic | 11 -
 eng/common/cross/x64/sources.list.xenial | 11 -
 eng/common/cross/x86/sources.list.bionic | 11 -
 eng/common/cross/x86/sources.list.focal | 11 -
 eng/common/cross/x86/sources.list.jammy | 11 -
 eng/common/cross/x86/sources.list.xenial | 11 -
 eng/common/darc-init.ps1 | 2 +-
 eng/common/darc-init.sh | 2 +-
 eng/common/dotnet-install.sh | 5 +-
 eng/common/internal/Directory.Build.props | 5 +
 eng/common/internal/Tools.csproj | 2 +-
 eng/common/native/CommonLibrary.psm1 | 3 +-
 eng/common/native/init-compiler.sh | 117 ++++---
 eng/common/native/init-distro-rid.sh | 18 +-
eng/common/native/install-dependencies.sh | 65 ++++ .../post-build/add-build-to-channel.ps1 | 48 --- .../post-build/check-channel-consistency.ps1 | 10 +- eng/common/post-build/nuget-validation.ps1 | 20 +- eng/common/post-build/nuget-verification.ps1 | 121 +++++++ eng/common/post-build/post-build-utils.ps1 | 91 ----- eng/common/post-build/publish-using-darc.ps1 | 23 +- eng/common/post-build/redact-logs.ps1 | 10 +- .../post-build/sourcelink-validation.ps1 | 10 +- eng/common/post-build/symbols-validation.ps1 | 2 - .../post-build/trigger-subscriptions.ps1 | 64 ---- eng/common/sdk-task.ps1 | 2 +- eng/common/template-guidance.md | 133 +++++++ eng/common/templates-official/job/job.yml | 331 ++++-------------- .../templates-official/job/onelocbuild.yml | 115 +----- .../job/publish-build-assets.yml | 162 +-------- .../templates-official/job/source-build.yml | 70 +--- .../job/source-index-stage1.yml | 70 +--- .../templates-official/jobs/codeql-build.yml | 32 +- eng/common/templates-official/jobs/jobs.yml | 100 +----- .../templates-official/jobs/source-build.yml | 49 +-- .../post-build/common-variables.yml | 28 +- .../post-build/post-build.yml | 289 +-------------- .../post-build/setup-maestro-vars.yml | 74 +--- .../post-build/trigger-subscription.yml | 13 - .../steps/add-build-to-channel.yml | 13 - .../steps/component-governance.yml | 18 +- .../steps/enable-internal-runtimes.yml | 9 + .../steps/enable-internal-sources.yml | 7 + .../steps/generate-sbom.yml | 51 +-- .../steps/get-delegation-sas.yml | 7 + .../steps/get-federated-access-token.yml | 7 + .../steps/publish-build-artifacts.yml | 41 +++ .../templates-official/steps/publish-logs.yml | 54 +-- .../steps/publish-pipeline-artifacts.yml | 28 ++ .../templates-official/steps/retain-build.yml | 33 +- .../steps/send-to-helix.yml | 98 +----- .../templates-official/steps/source-build.yml | 134 +------ .../steps/source-index-stage1-publish.yml | 7 + .../variables/pool-providers.yml | 2 +- eng/common/templates/job/execute-sdl.yml | 139 -------- eng/common/templates/job/job.yml | 317 ++++------------- eng/common/templates/job/onelocbuild.yml | 112 +----- .../templates/job/publish-build-assets.yml | 158 +-------- eng/common/templates/job/source-build.yml | 69 +--- .../templates/job/source-index-stage1.yml | 70 +--- eng/common/templates/jobs/codeql-build.yml | 32 +- eng/common/templates/jobs/jobs.yml | 100 +----- eng/common/templates/jobs/source-build.yml | 49 +-- .../templates/post-build/common-variables.yml | 28 +- .../templates/post-build/post-build.yml | 286 +-------------- .../post-build/setup-maestro-vars.yml | 74 +--- .../post-build/trigger-subscription.yml | 13 - .../templates/steps/add-build-to-channel.yml | 13 - eng/common/templates/steps/build-reason.yml | 12 - .../templates/steps/component-governance.yml | 18 +- .../steps/enable-internal-runtimes.yml | 10 + .../steps/enable-internal-sources.yml | 7 + eng/common/templates/steps/execute-codeql.yml | 32 -- eng/common/templates/steps/execute-sdl.yml | 88 ----- eng/common/templates/steps/generate-sbom.yml | 51 +-- .../templates/steps/get-delegation-sas.yml | 7 + .../steps/get-federated-access-token.yml | 7 + .../steps/publish-build-artifacts.yml | 40 +++ eng/common/templates/steps/publish-logs.yml | 54 +-- .../steps/publish-pipeline-artifacts.yml | 34 ++ eng/common/templates/steps/retain-build.yml | 33 +- eng/common/templates/steps/run-on-unix.yml | 7 - eng/common/templates/steps/run-on-windows.yml | 7 - .../steps/run-script-ifequalelse.yml | 33 -- eng/common/templates/steps/send-to-helix.yml | 98 
+----- eng/common/templates/steps/source-build.yml | 134 +------ .../steps/source-index-stage1-publish.yml | 7 + eng/common/templates/steps/telemetry-end.yml | 102 ------ .../templates/steps/telemetry-start.yml | 241 ------------- .../templates/variables/pool-providers.yml | 54 +-- .../templates/variables/sdl-variables.yml | 7 - eng/common/tools.ps1 | 31 +- eng/common/tools.sh | 28 +- global.json | 4 +- 148 files changed, 3325 insertions(+), 4538 deletions(-) create mode 100644 eng/common/core-templates/job/job.yml create mode 100644 eng/common/core-templates/job/onelocbuild.yml create mode 100644 eng/common/core-templates/job/publish-build-assets.yml create mode 100644 eng/common/core-templates/job/source-build.yml create mode 100644 eng/common/core-templates/job/source-index-stage1.yml create mode 100644 eng/common/core-templates/jobs/codeql-build.yml create mode 100644 eng/common/core-templates/jobs/jobs.yml create mode 100644 eng/common/core-templates/jobs/source-build.yml create mode 100644 eng/common/core-templates/post-build/common-variables.yml create mode 100644 eng/common/core-templates/post-build/post-build.yml create mode 100644 eng/common/core-templates/post-build/setup-maestro-vars.yml create mode 100644 eng/common/core-templates/steps/cleanup-microbuild.yml create mode 100644 eng/common/core-templates/steps/component-governance.yml create mode 100644 eng/common/core-templates/steps/enable-internal-runtimes.yml create mode 100644 eng/common/core-templates/steps/enable-internal-sources.yml create mode 100644 eng/common/core-templates/steps/generate-sbom.yml create mode 100644 eng/common/core-templates/steps/get-delegation-sas.yml create mode 100644 eng/common/core-templates/steps/get-federated-access-token.yml create mode 100644 eng/common/core-templates/steps/install-microbuild.yml create mode 100644 eng/common/core-templates/steps/publish-build-artifacts.yml create mode 100644 eng/common/core-templates/steps/publish-logs.yml create mode 100644 eng/common/core-templates/steps/publish-pipeline-artifacts.yml create mode 100644 eng/common/core-templates/steps/retain-build.yml create mode 100644 eng/common/core-templates/steps/send-to-helix.yml create mode 100644 eng/common/core-templates/steps/source-build.yml create mode 100644 eng/common/core-templates/steps/source-index-stage1-publish.yml create mode 100644 eng/common/core-templates/variables/pool-providers.yml delete mode 100644 eng/common/cross/arm/sources.list.bionic delete mode 100644 eng/common/cross/arm/sources.list.focal delete mode 100644 eng/common/cross/arm/sources.list.jammy delete mode 100644 eng/common/cross/arm/sources.list.jessie delete mode 100644 eng/common/cross/arm/sources.list.xenial delete mode 100644 eng/common/cross/arm/sources.list.zesty delete mode 100644 eng/common/cross/arm64/sources.list.bionic delete mode 100644 eng/common/cross/arm64/sources.list.buster delete mode 100644 eng/common/cross/arm64/sources.list.focal delete mode 100644 eng/common/cross/arm64/sources.list.jammy delete mode 100644 eng/common/cross/arm64/sources.list.stretch delete mode 100644 eng/common/cross/arm64/sources.list.xenial delete mode 100644 eng/common/cross/arm64/sources.list.zesty delete mode 100644 eng/common/cross/armel/sources.list.jessie delete mode 100644 eng/common/cross/armv6/sources.list.buster delete mode 100644 eng/common/cross/ppc64le/sources.list.bionic delete mode 100644 eng/common/cross/riscv64/sources.list.sid delete mode 100644 eng/common/cross/s390x/sources.list.bionic delete mode 100644 
eng/common/cross/x64/sources.list.bionic delete mode 100644 eng/common/cross/x64/sources.list.xenial delete mode 100644 eng/common/cross/x86/sources.list.bionic delete mode 100644 eng/common/cross/x86/sources.list.focal delete mode 100644 eng/common/cross/x86/sources.list.jammy delete mode 100644 eng/common/cross/x86/sources.list.xenial create mode 100644 eng/common/native/install-dependencies.sh delete mode 100644 eng/common/post-build/add-build-to-channel.ps1 create mode 100644 eng/common/post-build/nuget-verification.ps1 delete mode 100644 eng/common/post-build/post-build-utils.ps1 delete mode 100644 eng/common/post-build/trigger-subscriptions.ps1 create mode 100644 eng/common/template-guidance.md delete mode 100644 eng/common/templates-official/post-build/trigger-subscription.yml delete mode 100644 eng/common/templates-official/steps/add-build-to-channel.yml create mode 100644 eng/common/templates-official/steps/enable-internal-runtimes.yml create mode 100644 eng/common/templates-official/steps/enable-internal-sources.yml create mode 100644 eng/common/templates-official/steps/get-delegation-sas.yml create mode 100644 eng/common/templates-official/steps/get-federated-access-token.yml create mode 100644 eng/common/templates-official/steps/publish-build-artifacts.yml create mode 100644 eng/common/templates-official/steps/publish-pipeline-artifacts.yml create mode 100644 eng/common/templates-official/steps/source-index-stage1-publish.yml delete mode 100644 eng/common/templates/job/execute-sdl.yml delete mode 100644 eng/common/templates/post-build/trigger-subscription.yml delete mode 100644 eng/common/templates/steps/add-build-to-channel.yml delete mode 100644 eng/common/templates/steps/build-reason.yml create mode 100644 eng/common/templates/steps/enable-internal-runtimes.yml create mode 100644 eng/common/templates/steps/enable-internal-sources.yml delete mode 100644 eng/common/templates/steps/execute-codeql.yml delete mode 100644 eng/common/templates/steps/execute-sdl.yml create mode 100644 eng/common/templates/steps/get-delegation-sas.yml create mode 100644 eng/common/templates/steps/get-federated-access-token.yml create mode 100644 eng/common/templates/steps/publish-build-artifacts.yml create mode 100644 eng/common/templates/steps/publish-pipeline-artifacts.yml delete mode 100644 eng/common/templates/steps/run-on-unix.yml delete mode 100644 eng/common/templates/steps/run-on-windows.yml delete mode 100644 eng/common/templates/steps/run-script-ifequalelse.yml create mode 100644 eng/common/templates/steps/source-index-stage1-publish.yml delete mode 100644 eng/common/templates/steps/telemetry-end.yml delete mode 100644 eng/common/templates/steps/telemetry-start.yml delete mode 100644 eng/common/templates/variables/sdl-variables.yml diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index e5a2038e1..bfc068acc 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - 117337a1e7024e31b321d44d8b71a821e69c3967 + 7d955f9f470465e144c76d47fd2596a0e4c02a21 - + https://github.com/dotnet/arcade - 117337a1e7024e31b321d44d8b71a821e69c3967 + 7d955f9f470465e144c76d47fd2596a0e4c02a21 diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1 index efa2fd72b..5db4ad71e 100644 --- a/eng/common/SetupNugetSources.ps1 +++ b/eng/common/SetupNugetSources.ps1 @@ -1,17 +1,10 @@ -# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds. 
-# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080 +# This script adds internal feeds required to build commits that depend on internal package sources. For instance, +# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition also enables +# disabled internal Maestro (darc-int*) feeds. # -# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry -# under for each Maestro managed private feed. Two additional credential -# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport. +# Optionally, this script also adds a credential entry for each of the internal feeds if supplied. # -# This script needs to be called in every job that will restore packages and which the base repo has -# private AzDO feeds in the NuGet.config. -# -# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)` -# from the AzureDevOps-Artifact-Feeds-Pats variable group. -# -# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing +# See example call for this script below. # # - task: PowerShell@2 # displayName: Setup Private Feeds Credentials @@ -21,11 +14,18 @@ # arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token # env: # Token: $(dn-bot-dnceng-artifact-feeds-rw) +# +# Note that the NuGetAuthenticate task should be called after SetupNugetSources. +# This ensures that: +# - Appropriate creds are set for the added internal feeds (if not supplied to the scrupt) +# - The credential provider is installed. +# +# This logic is also abstracted into enable-internal-sources.yml. [CmdletBinding()] param ( [Parameter(Mandatory = $true)][string]$ConfigFile, - [Parameter(Mandatory = $true)][string]$Password + $Password ) $ErrorActionPreference = "Stop" @@ -48,11 +48,17 @@ function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Usern else { Write-Host "Package source $SourceName already present." } + AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd } # Add a credential node for the specified source function AddCredential($creds, $source, $username, $pwd) { + # If no cred supplied, don't do anything. + if (!$pwd) { + return; + } + # Looks for credential configuration for the given SourceName. Create it if none is found. $sourceElement = $creds.SelectSingleNode($Source) if ($sourceElement -eq $null) @@ -110,11 +116,6 @@ if (!(Test-Path $ConfigFile -PathType Leaf)) { ExitWithExitCode 1 } -if (!$Password) { - Write-PipelineTelemetryError -Category 'Build' -Message 'Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Please supply a valid PAT' - ExitWithExitCode 1 -} - # Load NuGet.config $doc = New-Object System.Xml.XmlDocument $filename = (Get-Item $ConfigFile).FullName @@ -127,11 +128,14 @@ if ($sources -eq $null) { $doc.DocumentElement.AppendChild($sources) | Out-Null } -# Looks for a node. Create it if none is found. -$creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials") -if ($creds -eq $null) { - $creds = $doc.CreateElement("packageSourceCredentials") - $doc.DocumentElement.AppendChild($creds) | Out-Null +$creds = $null +if ($Password) { + # Looks for a node. Create it if none is found. 
+ $creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials") + if ($creds -eq $null) { + $creds = $doc.CreateElement("packageSourceCredentials") + $doc.DocumentElement.AppendChild($creds) | Out-Null + } } # Check for disabledPackageSources; we'll enable any darc-int ones we find there @@ -153,7 +157,7 @@ if ($dotnet31Source -ne $null) { AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password } -$dotnetVersions = @('5','6','7','8') +$dotnetVersions = @('5','6','7','8','9') foreach ($dotnetVersion in $dotnetVersions) { $feedPrefix = "dotnet" + $dotnetVersion; @@ -164,4 +168,4 @@ foreach ($dotnetVersion in $dotnetVersions) { } } -$doc.Save($filename) \ No newline at end of file +$doc.Save($filename) diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh index d387c7eac..4604b61b0 100644 --- a/eng/common/SetupNugetSources.sh +++ b/eng/common/SetupNugetSources.sh @@ -1,28 +1,27 @@ #!/usr/bin/env bash -# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds. -# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080 +# This script adds internal feeds required to build commits that depend on internal package sources. For instance, +# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition also enables +# disabled internal Maestro (darc-int*) feeds. +# +# Optionally, this script also adds a credential entry for each of the internal feeds if supplied. # -# What the script does is iterate over all package sources in the pointed NuGet.config and add a credential entry -# under for each Maestro's managed private feed. Two additional credential -# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport. -# -# This script needs to be called in every job that will restore packages and which the base repo has -# private AzDO feeds in the NuGet.config. -# -# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)` -# from the AzureDevOps-Artifact-Feeds-Pats variable group. -# -# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing. +# See example call for this script below. # # - task: Bash@3 -# displayName: Setup Private Feeds Credentials +# displayName: Setup Internal Feeds # inputs: # filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh -# arguments: $(Build.SourcesDirectory)/NuGet.config $Token +# arguments: $(Build.SourcesDirectory)/NuGet.config # condition: ne(variables['Agent.OS'], 'Windows_NT') -# env: -# Token: $(dn-bot-dnceng-artifact-feeds-rw) +# - task: NuGetAuthenticate@1 +# +# Note that the NuGetAuthenticate task should be called after SetupNugetSources. +# This ensures that: +# - Appropriate creds are set for the added internal feeds (if not supplied to the scrupt) +# - The credential provider is installed. +# +# This logic is also abstracted into enable-internal-sources.yml. ConfigFile=$1 CredToken=$2 @@ -48,11 +47,6 @@ if [ ! -f "$ConfigFile" ]; then ExitWithExitCode 1 fi -if [ -z "$CredToken" ]; then - Write-PipelineTelemetryError -category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. 
Please supply a valid PAT" - ExitWithExitCode 1 -fi - if [[ `uname -s` == "Darwin" ]]; then NL=$'\\\n' TB='' @@ -105,7 +99,7 @@ if [ "$?" == "0" ]; then PackageSources+=('dotnet3.1-internal-transport') fi -DotNetVersions=('5' '6' '7' '8') +DotNetVersions=('5' '6' '7' '8' '9') for DotNetVersion in ${DotNetVersions[@]} ; do FeedPrefix="dotnet${DotNetVersion}"; @@ -140,18 +134,20 @@ PackageSources+="$IFS" PackageSources+=$(grep -oh '"darc-int-[^"]*"' $ConfigFile | tr -d '"') IFS=$PrevIFS -for FeedName in ${PackageSources[@]} ; do - # Check if there is no existing credential for this FeedName - grep -i "<$FeedName>" $ConfigFile - if [ "$?" != "0" ]; then - echo "Adding credentials for $FeedName." +if [ "$CredToken" ]; then + for FeedName in ${PackageSources[@]} ; do + # Check if there is no existing credential for this FeedName + grep -i "<$FeedName>" $ConfigFile + if [ "$?" != "0" ]; then + echo "Adding credentials for $FeedName." - PackageSourceCredentialsNodeFooter="" - NewCredential="${TB}${TB}<$FeedName>${NL}${NL}${NL}" + PackageSourceCredentialsNodeFooter="" + NewCredential="${TB}${TB}<$FeedName>${NL}${NL}${NL}" - sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile - fi -done + sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile + fi + done +fi # Re-enable any entries in disabledPackageSources where the feed name contains darc-int grep -i "" $ConfigFile diff --git a/eng/common/build.ps1 b/eng/common/build.ps1 index 83e6d82e0..438f9920c 100644 --- a/eng/common/build.ps1 +++ b/eng/common/build.ps1 @@ -19,7 +19,6 @@ Param( [switch] $pack, [switch] $publish, [switch] $clean, - [switch] $verticalBuild, [switch][Alias('pb')]$productBuild, [switch][Alias('bl')]$binaryLog, [switch][Alias('nobl')]$excludeCIBinarylog, @@ -60,7 +59,6 @@ function Print-Usage() { Write-Host " -sign Sign build outputs" Write-Host " -publish Publish artifacts (e.g. symbols)" Write-Host " -clean Clean the solution" - Write-Host " -verticalBuild Run in 'vertical build' infra mode." 
Write-Host " -productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)" Write-Host "" @@ -124,7 +122,7 @@ function Build { /p:Deploy=$deploy ` /p:Test=$test ` /p:Pack=$pack ` - /p:DotNetBuildRepo=$($productBuild -or $verticalBuild) ` + /p:DotNetBuildRepo=$productBuild ` /p:IntegrationTest=$integrationTest ` /p:PerformanceTest=$performanceTest ` /p:Sign=$sign ` diff --git a/eng/common/build.sh b/eng/common/build.sh index d82ebf742..483647daf 100644 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -62,7 +62,6 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" restore=false build=false source_build=false -vertical_build=false product_build=false rebuild=false test=false @@ -141,13 +140,6 @@ while [[ $# > 0 ]]; do restore=true pack=true ;; - -verticalbuild|-vb) - build=true - vertical_build=true - product_build=true - restore=true - pack=true - ;; -test|-t) test=true ;; @@ -239,7 +231,6 @@ function Build { /p:Restore=$restore \ /p:Build=$build \ /p:DotNetBuildRepo=$product_build \ - /p:ArcadeBuildFromSource=$source_build \ /p:DotNetBuildSourceOnly=$source_build \ /p:Rebuild=$rebuild \ /p:Test=$test \ diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml new file mode 100644 index 000000000..295c9a231 --- /dev/null +++ b/eng/common/core-templates/job/job.yml @@ -0,0 +1,239 @@ +parameters: +# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + cancelTimeoutInMinutes: '' + condition: '' + container: '' + continueOnError: false + dependsOn: '' + displayName: '' + pool: '' + steps: [] + strategy: '' + timeoutInMinutes: '' + variables: [] + workspace: '' + templateContext: {} + +# Job base template specific parameters + # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md + # publishing defaults + artifacts: '' + enableMicrobuild: false + enableMicrobuildForMacAndLinux: false + enablePublishBuildArtifacts: false + enablePublishBuildAssets: false + enablePublishTestResults: false + enablePublishUsingPipelines: false + enableBuildRetry: false + mergeTestResults: false + testRunTitle: '' + testResultsFormat: '' + name: '' + componentGovernanceSteps: [] + preSteps: [] + artifactPublishSteps: [] + runAsPublic: false + +# 1es specific parameters + is1ESPipeline: '' + +jobs: +- job: ${{ parameters.name }} + + ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: + cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} + + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + + ${{ if ne(parameters.container, '') }}: + container: ${{ parameters.container }} + + ${{ if ne(parameters.continueOnError, '') }}: + continueOnError: ${{ parameters.continueOnError }} + + ${{ if ne(parameters.dependsOn, '') }}: + dependsOn: ${{ parameters.dependsOn }} + + ${{ if ne(parameters.displayName, '') }}: + displayName: ${{ parameters.displayName }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + + ${{ if ne(parameters.strategy, '') }}: + strategy: ${{ parameters.strategy }} + + ${{ if ne(parameters.timeoutInMinutes, '') }}: + timeoutInMinutes: ${{ parameters.timeoutInMinutes }} + + ${{ if ne(parameters.templateContext, '') }}: + templateContext: ${{ parameters.templateContext }} + + variables: + - ${{ if ne(parameters.enableTelemetry, 'false') }}: + - name: DOTNET_CLI_TELEMETRY_PROFILE + value: '$(Build.Repository.Uri)' + - ${{ if 
eq(parameters.enableRichCodeNavigation, 'true') }}: + - name: EnableRichCodeNavigation + value: 'true' + # Retry signature validation up to three times, waiting 2 seconds between attempts. + # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures + - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY + value: 3,2000 + - ${{ each variable in parameters.variables }}: + # handle name-value variable syntax + # example: + # - name: [key] + # value: [value] + - ${{ if ne(variable.name, '') }}: + - name: ${{ variable.name }} + value: ${{ variable.value }} + + # handle variable groups + - ${{ if ne(variable.group, '') }}: + - group: ${{ variable.group }} + + # handle template variable syntax + # example: + # - template: path/to/template.yml + # parameters: + # [key]: [value] + - ${{ if ne(variable.template, '') }}: + - template: ${{ variable.template }} + ${{ if ne(variable.parameters, '') }}: + parameters: ${{ variable.parameters }} + + # handle key-value variable syntax. + # example: + # - [key]: [value] + - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: + - ${{ each pair in variable }}: + - name: ${{ pair.key }} + value: ${{ pair.value }} + + # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds + - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: DotNet-HelixApi-Access + + ${{ if ne(parameters.workspace, '') }}: + workspace: ${{ parameters.workspace }} + + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if ne(parameters.preSteps, '') }}: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - template: /eng/common/core-templates/steps/install-microbuild.yml + parameters: + enableMicrobuild: ${{ parameters.enableMicrobuild }} + enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }} + continueOnError: ${{ parameters.continueOnError }} + + - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: + - task: NuGetAuthenticate@1 + + - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: + - task: DownloadPipelineArtifact@2 + inputs: + buildType: current + artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} + targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} + itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} + + - ${{ each step in parameters.steps }}: + - ${{ step }} + + - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: + - task: RichCodeNavIndexer@0 + displayName: RichCodeNav Upload + inputs: + languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} + environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }} + richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin + uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} + continueOnError: true + + - ${{ each step in 
parameters.componentGovernanceSteps }}: + - ${{ step }} + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - template: /eng/common/core-templates/steps/cleanup-microbuild.yml + parameters: + enableMicrobuild: ${{ parameters.enableMicrobuild }} + enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }} + continueOnError: ${{ parameters.continueOnError }} + + # Publish test results + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: + - task: PublishTestResults@2 + displayName: Publish XUnit Test Results + inputs: + testResultsFormat: 'xUnit' + testResultsFiles: '*.xml' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}: + - task: PublishTestResults@2 + displayName: Publish TRX Test Results + inputs: + testResultsFormat: 'VSTest' + testResultsFiles: '*.trx' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + + # gather artifacts + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - task: CopyFiles@2 + displayName: Gather binaries for publish to artifacts + inputs: + SourceFolder: 'artifacts/bin' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' + - task: CopyFiles@2 + displayName: Gather packages for publish to artifacts + inputs: + SourceFolder: 'artifacts/packages' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - task: CopyFiles@2 + displayName: Gather logs for publish to artifacts + inputs: + SourceFolder: 'artifacts/log' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log' + continueOnError: true + condition: always() + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - task: CopyFiles@2 + displayName: Gather logs for publish to artifacts + inputs: + SourceFolder: 'artifacts/log/$(_BuildConfig)' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' + continueOnError: true + condition: always() + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - task: CopyFiles@2 + displayName: Gather buildconfiguration for build retry + inputs: + SourceFolder: '$(Build.SourcesDirectory)/eng/common/BuildConfiguration' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration' + continueOnError: true + condition: always() + - ${{ each step in parameters.artifactPublishSteps }}: + - ${{ step }} diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml new file mode 100644 index 
000000000..00feec8eb --- /dev/null +++ b/eng/common/core-templates/job/onelocbuild.yml @@ -0,0 +1,121 @@ +parameters: + # Optional: dependencies of the job + dependsOn: '' + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: '' + + CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex + GithubPat: $(BotAccount-dotnet-bot-repo-PAT) + + SourcesDirectory: $(Build.SourcesDirectory) + CreatePr: true + AutoCompletePr: false + ReusePr: true + UseLfLineEndings: true + UseCheckedInLocProjectJson: false + SkipLocProjectJsonGeneration: false + LanguageSet: VS_Main_Languages + LclSource: lclFilesInRepo + LclPackageId: '' + RepoType: gitHub + GitHubOrg: dotnet + MirrorRepo: '' + MirrorBranch: main + condition: '' + JobNameSuffix: '' + is1ESPipeline: '' +jobs: +- job: OneLocBuild${{ parameters.JobNameSuffix }} + + dependsOn: ${{ parameters.dependsOn }} + + displayName: OneLocBuild${{ parameters.JobNameSuffix }} + + variables: + - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat + - name: _GenerateLocProjectArguments + value: -SourcesDirectory ${{ parameters.SourcesDirectory }} + -LanguageSet "${{ parameters.LanguageSet }}" + -CreateNeutralXlfs + - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: + - name: _GenerateLocProjectArguments + value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: + - task: Powershell@2 + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 + arguments: $(_GenerateLocProjectArguments) + displayName: Generate LocProject.json + condition: ${{ parameters.condition }} + + - task: OneLocBuild@2 + displayName: OneLocBuild + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + inputs: + locProj: eng/Localize/LocProject.json + outDir: $(Build.ArtifactStagingDirectory) + lclSource: ${{ parameters.LclSource }} + lclPackageId: ${{ parameters.LclPackageId }} + isCreatePrSelected: ${{ parameters.CreatePr }} + isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} + ${{ if eq(parameters.CreatePr, true) }}: + isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + isShouldReusePrSelected: ${{ parameters.ReusePr }} + packageSourceAuth: patAuth + patVariable: ${{ parameters.CeapexPat }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + repoType: ${{ parameters.RepoType }} + gitHubPatVariable: "${{ parameters.GithubPat }}" + ${{ if ne(parameters.MirrorRepo, '') }}: + isMirrorRepoSelected: true + gitHubOrganization: ${{ parameters.GitHubOrg }} + mirrorRepo: ${{ parameters.MirrorRepo }} + mirrorBranch: ${{ parameters.MirrorBranch }} + condition: ${{ parameters.condition }} + + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish Localization Files + pathToPublish: '$(Build.ArtifactStagingDirectory)/loc' + publishLocation: Container + artifactName: Loc + condition: ${{ parameters.condition }} + + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish LocProject.json + pathToPublish: '$(Build.SourcesDirectory)/eng/Localize/' + publishLocation: Container + artifactName: Loc + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml new file mode 100644 index 000000000..3d3356e31 --- /dev/null +++ b/eng/common/core-templates/job/publish-build-assets.yml @@ -0,0 +1,158 @@ +parameters: + configuration: 'Debug' + + # Optional: condition for the job to run + condition: '' + + # Optional: 'true' if future jobs should run even if this job fails + continueOnError: false + + # Optional: dependencies of the job + dependsOn: '' + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: {} + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
+ runAsPublic: false + + # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing + publishUsingPipelines: false + + # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing + publishAssetsImmediately: false + + artifactsPublishingAdditionalParameters: '' + + signingValidationAdditionalParameters: '' + + is1ESPipeline: '' + +jobs: +- job: Asset_Registry_Publish + + dependsOn: ${{ parameters.dependsOn }} + timeoutInMinutes: 150 + + ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + displayName: Publish Assets + ${{ else }}: + displayName: Publish to Build Asset Registry + + variables: + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: Publish-Build-Assets + - group: AzureDevOps-Artifact-Feeds-Pats + - name: runCodesignValidationInjection + value: false + # unconditional - needed for logs publishing (redactor tool version) + - template: /eng/common/core-templates/post-build/common-variables.yml + + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: NetCore1ESPool-Publishing-Internal + image: windows.vs2019.amd64 + os: windows + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - checkout: self + fetchDepth: 3 + clean: true + + - task: DownloadBuildArtifacts@0 + displayName: Download artifact + inputs: + artifactName: AssetManifests + downloadPath: '$(Build.StagingDirectory)/Download' + checkDownloadedFiles: true + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: NuGetAuthenticate@1 + + - task: AzureCLI@2 + displayName: Publish Build Assets + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1 + arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet + /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' + /p:MaestroApiEndpoint=https://maestro.dot.net + /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} + /p:OfficialBuildId=$(Build.BuildNumber) + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: powershell@2 + displayName: Create ReleaseConfigs Artifact + inputs: + targetType: inline + script: | + New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force + $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt" + Add-Content -Path $filePath -Value $(BARBuildId) + Add-Content -Path $filePath -Value "$(DefaultChannels)" + Add-Content -Path $filePath -Value $(IsStableBuild) + + $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" + if (Test-Path -Path $symbolExclusionfile) + { + Write-Host "SymbolExclusionFile exists" + Copy-Item -Path $symbolExclusionfile -Destination "$(Build.StagingDirectory)/ReleaseConfigs" + } + + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish ReleaseConfigs Artifact + pathToPublish: '$(Build.StagingDirectory)/ReleaseConfigs' + publishLocation: Container + artifactName: ReleaseConfigs + + - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: AzureCLI@2 + displayName: Publish Using Darc + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: > + -BuildId $(BARBuildId) + -PublishingInfraVersion 3 + -AzdoToken '$(System.AccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml new file mode 100644 index 000000000..05f7ad6ef --- 
/dev/null +++ b/eng/common/core-templates/job/source-build.yml @@ -0,0 +1,94 @@ +parameters: + # This template adds arcade-powered source-build to CI. The template produces a server job with a + # default ID 'Source_Build_Complete' to put in a dependency list if necessary. + + # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. + jobNamePrefix: 'Source_Build' + + # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for + # managed-only repositories. This is an object with these properties: + # + # name: '' + # The name of the job. This is included in the job ID. + # targetRID: '' + # The name of the target RID to use, instead of the one auto-detected by Arcade. + # portableBuild: false + # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than + # linux-x64), and compiling against distro-provided packages rather than portable ones. The + # default is portable mode. + # skipPublishValidation: false + # Disables publishing validation. By default, a check is performed to ensure no packages are + # published by source-build. + # container: '' + # A container to use. Runs in docker. + # pool: {} + # A pool to use. Runs directly on an agent. + # buildScript: '' + # Specifies the build script to invoke to perform the build in the repo. The default + # './build.sh' should work for typical Arcade repositories, but this is customizable for + # difficult situations. + # jobProperties: {} + # A list of job properties to inject at the top level, for potential extensibility beyond + # container and pool. + platform: {} + + is1ESPipeline: '' + + # If set to true and running on a non-public project, + # Internal nuget and blob storage locations will be enabled. + # This is not enabled by default because many repositories do not need internal sources + # and do not need to have the required service connections approved in the pipeline. + enableInternalSources: false + +jobs: +- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} + displayName: Source-Build (${{ parameters.platform.name }}) + + ${{ each property in parameters.platform.jobProperties }}: + ${{ property.key }}: ${{ property.value }} + + ${{ if ne(parameters.platform.container, '') }}: + container: ${{ parameters.platform.container }} + + ${{ if eq(parameters.platform.pool, '') }}: + # The default VM host AzDO pool. This should be capable of running Docker containers: almost all + # source-build builds run in Docker, including the default managed platform. 
+ # /eng/common/core-templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic + ${{ if eq(parameters.is1ESPipeline, 'true') }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] + demands: ImageOverride -equals build.ubuntu.2004.amd64 + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] + image: 1es-mariner-2 + os: linux + ${{ else }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] + demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] + demands: ImageOverride -equals Build.Ubuntu.2204.Amd64 + ${{ if ne(parameters.platform.pool, '') }}: + pool: ${{ parameters.platform.pool }} + + workspace: + clean: all + + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if eq(parameters.enableInternalSources, true) }}: + - template: /eng/common/core-templates/steps/enable-internal-sources.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + - template: /eng/common/core-templates/steps/enable-internal-runtimes.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + - template: /eng/common/core-templates/steps/source-build.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + platform: ${{ parameters.platform }} diff --git a/eng/common/core-templates/job/source-index-stage1.yml b/eng/common/core-templates/job/source-index-stage1.yml new file mode 100644 index 000000000..30530359a --- /dev/null +++ b/eng/common/core-templates/job/source-index-stage1.yml @@ -0,0 +1,44 @@ +parameters: + runAsPublic: false + sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" + preSteps: [] + binlogPath: artifacts/log/Debug/Build.binlog + condition: '' + dependsOn: '' + pool: '' + is1ESPipeline: '' + +jobs: +- job: SourceIndexStage1 + dependsOn: ${{ parameters.dependsOn }} + condition: ${{ parameters.condition }} + variables: + - name: BinlogPath + value: ${{ parameters.binlogPath }} + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $(DncEngPublicBuildPool) + image: windows.vs2022.amd64.open + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $(DncEngInternalBuildPool) + image: windows.vs2022.amd64 + + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + - script: ${{ parameters.sourceIndexBuildCommand }} + displayName: Build Repository + + - template: /eng/common/core-templates/steps/source-index-stage1-publish.yml + parameters: + binLogPath: ${{ parameters.binLogPath }} \ No newline at end of file diff --git a/eng/common/core-templates/jobs/codeql-build.yml b/eng/common/core-templates/jobs/codeql-build.yml new file mode 100644 index 000000000..f2144252c --- /dev/null +++ b/eng/common/core-templates/jobs/codeql-build.yml @@ -0,0 +1,33 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + # Optional: if specified, restore and use this version of Guardian instead of the default. + overrideGuardianVersion: '' + is1ESPipeline: '' + +jobs: +- template: /eng/common/core-templates/jobs/jobs.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + enableMicrobuild: false + enablePublishBuildArtifacts: false + enablePublishTestResults: false + enablePublishBuildAssets: false + enablePublishUsingPipelines: false + enableTelemetry: true + + variables: + - group: Publish-Build-Assets + # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in + # sync with the packages.config file. 
+ - name: DefaultGuardianVersion + value: 0.109.0 + - name: GuardianPackagesConfigFile + value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config + - name: GuardianVersion + value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} + + jobs: ${{ parameters.jobs }} + diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml new file mode 100644 index 000000000..ea69be434 --- /dev/null +++ b/eng/common/core-templates/jobs/jobs.yml @@ -0,0 +1,119 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: Enable publishing using release pipelines + enablePublishUsingPipelines: false + + # Optional: Enable running the source-build jobs to build repo from source + enableSourceBuild: false + + # Optional: Parameters for source-build template. + # See /eng/common/core-templates/jobs/source-build.yml for options + sourceBuildParameters: [] + + graphFileGeneration: + # Optional: Enable generating the graph files at the end of the build + enabled: false + # Optional: Include toolset dependencies in the generated graph files + includeToolset: false + + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + + # Optional: Override automatically derived dependsOn value for "publish build assets" job + publishBuildAssetsDependsOn: '' + + # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage. + publishAssetsImmediately: false + + # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) + artifactsPublishingAdditionalParameters: '' + signingValidationAdditionalParameters: '' + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. + runAsPublic: false + + enableSourceIndex: false + sourceIndexParams: {} + + artifacts: {} + is1ESPipeline: '' + +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds. 
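# For illustration only, a minimal sketch of how a repository's azure-pipelines.yml might consume
# this jobs template. Repos reference /eng/common/templates/jobs/jobs.yml (or the templates-official
# variant for 1ES pipelines) rather than the core-templates copy directly; the stage layout, job name,
# pool image, and build invocation below are assumptions, not taken from this patch:
#
#   stages:
#   - stage: build
#     jobs:
#     - template: /eng/common/templates/jobs/jobs.yml
#       parameters:
#         enableMicrobuild: true
#         enablePublishBuildArtifacts: true
#         enablePublishBuildAssets: true
#         enablePublishUsingPipelines: true
#         enableSourceBuild: true
#         jobs:
#         - job: Windows_NT
#           pool:
#             vmImage: windows-latest
#           steps:
#           - powershell: eng/common/build.ps1 -restore -build -test -binarylog -ci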
+ +jobs: +- ${{ each job in parameters.jobs }}: + - ${{ if eq(parameters.is1ESPipeline, 'true') }}: + - template: /eng/common/templates-official/job/job.yml + parameters: + # pass along parameters + ${{ each parameter in parameters }}: + ${{ if ne(parameter.key, 'jobs') }}: + ${{ parameter.key }}: ${{ parameter.value }} + + # pass along job properties + ${{ each property in job }}: + ${{ if ne(property.key, 'job') }}: + ${{ property.key }}: ${{ property.value }} + + name: ${{ job.job }} + + - ${{ else }}: + - template: /eng/common/templates/job/job.yml + parameters: + # pass along parameters + ${{ each parameter in parameters }}: + ${{ if ne(parameter.key, 'jobs') }}: + ${{ parameter.key }}: ${{ parameter.value }} + + # pass along job properties + ${{ each property in job }}: + ${{ if ne(property.key, 'job') }}: + ${{ property.key }}: ${{ property.value }} + + name: ${{ job.job }} + +- ${{ if eq(parameters.enableSourceBuild, true) }}: + - template: /eng/common/core-templates/jobs/source-build.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + allCompletedJobId: Source_Build_Complete + ${{ each parameter in parameters.sourceBuildParameters }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if eq(parameters.enableSourceIndex, 'true') }}: + - template: ../job/source-index-stage1.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + runAsPublic: ${{ parameters.runAsPublic }} + ${{ each parameter in parameters.sourceIndexParams }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: + - template: ../job/publish-build-assets.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + continueOnError: ${{ parameters.continueOnError }} + dependsOn: + - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.publishBuildAssetsDependsOn }}: + - ${{ job.job }} + - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.jobs }}: + - ${{ job.job }} + - ${{ if eq(parameters.enableSourceBuild, true) }}: + - Source_Build_Complete + + runAsPublic: ${{ parameters.runAsPublic }} + publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} + publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} + enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} diff --git a/eng/common/core-templates/jobs/source-build.yml b/eng/common/core-templates/jobs/source-build.yml new file mode 100644 index 000000000..a10ccfbee --- /dev/null +++ b/eng/common/core-templates/jobs/source-build.yml @@ -0,0 +1,58 @@ +parameters: + # This template adds arcade-powered source-build to CI. A job is created for each platform, as + # well as an optional server job that completes when all platform jobs complete. + + # The name of the "join" job for all source-build platforms. If set to empty string, the job is + # not included. 
Existing repo pipelines can use this job depend on all source-build jobs + # completing without maintaining a separate list of every single job ID: just depend on this one + # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. + allCompletedJobId: '' + + # See /eng/common/core-templates/job/source-build.yml + jobNamePrefix: 'Source_Build' + + # This is the default platform provided by Arcade, intended for use by a managed-only repo. + defaultManagedPlatform: + name: 'Managed' + container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9' + + # Defines the platforms on which to run build jobs. One job is created for each platform, and the + # object in this array is sent to the job template as 'platform'. If no platforms are specified, + # one job runs on 'defaultManagedPlatform'. + platforms: [] + + is1ESPipeline: '' + + # If set to true and running on a non-public project, + # Internal nuget and blob storage locations will be enabled. + # This is not enabled by default because many repositories do not need internal sources + # and do not need to have the required service connections approved in the pipeline. + enableInternalSources: false + +jobs: + +- ${{ if ne(parameters.allCompletedJobId, '') }}: + - job: ${{ parameters.allCompletedJobId }} + displayName: Source-Build Complete + pool: server + dependsOn: + - ${{ each platform in parameters.platforms }}: + - ${{ parameters.jobNamePrefix }}_${{ platform.name }} + - ${{ if eq(length(parameters.platforms), 0) }}: + - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} + +- ${{ each platform in parameters.platforms }}: + - template: /eng/common/core-templates/job/source-build.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ platform }} + enableInternalSources: ${{ parameters.enableInternalSources }} + +- ${{ if eq(length(parameters.platforms), 0) }}: + - template: /eng/common/core-templates/job/source-build.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ parameters.defaultManagedPlatform }} + enableInternalSources: ${{ parameters.enableInternalSources }} diff --git a/eng/common/core-templates/post-build/common-variables.yml b/eng/common/core-templates/post-build/common-variables.yml new file mode 100644 index 000000000..d5627a994 --- /dev/null +++ b/eng/common/core-templates/post-build/common-variables.yml @@ -0,0 +1,22 @@ +variables: + - group: Publish-Build-Assets + + # Whether the build is internal or not + - name: IsInternalBuild + value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} + + # Default Maestro++ API Endpoint and API Version + - name: MaestroApiEndPoint + value: "https://maestro.dot.net" + - name: MaestroApiVersion + value: "2020-02-20" + + - name: SourceLinkCLIVersion + value: 3.0.0 + - name: SymbolToolVersion + value: 1.0.1 + - name: BinlogToolVersion + value: 1.0.11 + + - name: runCodesignValidationInjection + value: false diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml new file mode 100644 index 000000000..454fd75c7 --- /dev/null +++ b/eng/common/core-templates/post-build/post-build.yml @@ -0,0 +1,316 @@ +parameters: + # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. 
+ # Publishing V1 is no longer supported + # Publishing V2 is no longer supported + # Publishing V3 is the default + - name: publishingInfraVersion + displayName: Which version of publishing should be used to promote the build definition? + type: number + default: 3 + values: + - 3 + + - name: BARBuildId + displayName: BAR Build Id + type: number + default: 0 + + - name: PromoteToChannelIds + displayName: Channel to promote BARBuildId to + type: string + default: '' + + - name: enableSourceLinkValidation + displayName: Enable SourceLink validation + type: boolean + default: false + + - name: enableSigningValidation + displayName: Enable signing validation + type: boolean + default: true + + - name: enableSymbolValidation + displayName: Enable symbol validation + type: boolean + default: false + + - name: enableNugetValidation + displayName: Enable NuGet validation + type: boolean + default: true + + - name: publishInstallersAndChecksums + displayName: Publish installers and checksums + type: boolean + default: true + + - name: SDLValidationParameters + type: object + default: + enable: false + publishGdn: false + continueOnError: false + params: '' + artifactNames: '' + downloadArtifacts: true + + # These parameters let the user customize the call to sdk-task.ps1 for publishing + # symbols & general artifacts as well as for signing validation + - name: symbolPublishingAdditionalParameters + displayName: Symbol publishing additional parameters + type: string + default: '' + + - name: artifactsPublishingAdditionalParameters + displayName: Artifact publishing additional parameters + type: string + default: '' + + - name: signingValidationAdditionalParameters + displayName: Signing validation additional parameters + type: string + default: '' + + # Which stages should finish execution before post-build stages start + - name: validateDependsOn + type: object + default: + - build + + - name: publishDependsOn + type: object + default: + - Validate + + # Optional: Call asset publishing rather than running in a separate stage + - name: publishAssetsImmediately + type: boolean + default: false + + - name: is1ESPipeline + type: boolean + default: false + +stages: +- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + - stage: Validate + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Validate Build Assets + variables: + - template: /eng/common/core-templates/post-build/common-variables.yml + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + jobs: + - job: + displayName: NuGet Validation + condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: $(DncEngInternalBuildPool) + image: windows.vs2022.amd64 + os: windows + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2022.amd64 + + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + + - job: + displayName: Signing Validation + condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2022.amd64 + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + itemPattern: | + ** + !**/Microsoft.SourceBuild.Intermediate.*.nupkg + + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. + - task: NuGetAuthenticate@1 + displayName: 'Authenticate to AzDO Feeds' + + # Signing validation will optionally work with the buildmanifest file which is downloaded from + # Azure DevOps above. 
+ - task: PowerShell@2 + displayName: Validate + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task SigningValidation -restore -msbuildEngine vs + /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' + /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' + ${{ parameters.signingValidationAdditionalParameters }} + + - template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + StageLabel: 'Validation' + JobLabel: 'Signing' + BinlogToolVersion: $(BinlogToolVersion) + + - job: + displayName: SourceLink Validation + condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2022.amd64 + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Blob Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: BlobArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 + arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) + -GHCommit $(Build.SourceVersion) + -SourcelinkCliVersion $(SourceLinkCLIVersion) + continueOnError: true + +- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: + - stage: publish_using_darc + ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + dependsOn: ${{ parameters.publishDependsOn }} + ${{ else }}: + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Publish using Darc + variables: + - template: /eng/common/core-templates/post-build/common-variables.yml + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + jobs: + - job: + displayName: Publish Using Darc + timeoutInMinutes: 120 + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: NetCore1ESPool-Publishing-Internal + image: windows.vs2019.amd64 + os: windows + ${{ else }}: + name: NetCore1ESPool-Publishing-Internal + demands: ImageOverride -equals windows.vs2019.amd64 + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: NuGetAuthenticate@1 + + - task: AzureCLI@2 + displayName: Publish Using Darc + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: > + -BuildId $(BARBuildId) + -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} + -AzdoToken '$(System.AccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' diff --git a/eng/common/core-templates/post-build/setup-maestro-vars.yml b/eng/common/core-templates/post-build/setup-maestro-vars.yml new file mode 100644 index 000000000..f7602980d --- /dev/null +++ b/eng/common/core-templates/post-build/setup-maestro-vars.yml @@ -0,0 +1,74 @@ +parameters: + BARBuildId: '' + PromoteToChannelIds: '' + is1ESPipeline: '' + +steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: + - task: DownloadBuildArtifacts@0 + displayName: Download Release Configs + inputs: + buildType: current + artifactName: ReleaseConfigs + checkDownloadedFiles: true + + - task: AzureCLI@2 + name: setReleaseVars + displayName: Set Release Configs Vars + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: pscore + scriptLocation: inlineScript + inlineScript: | + try { + if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { + $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt + + $BarId = $Content | Select -Index 0 + $Channels = $Content | Select -Index 1 + $IsStableBuild = $Content | Select -Index 2 + + $AzureDevOpsProject = $Env:System_TeamProject + $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId + $AzureDevOpsBuildId = $Env:Build_BuildId + } + else { + . 
$(Build.SourcesDirectory)\eng\common\tools.ps1 + $darc = Get-Darc + $buildInfo = & $darc get-build ` + --id ${{ parameters.BARBuildId }} ` + --extended ` + --output-format json ` + --ci ` + | convertFrom-Json + + $BarId = ${{ parameters.BARBuildId }} + $Channels = $Env:PromoteToMaestroChannels -split "," + $Channels = $Channels -join "][" + $Channels = "[$Channels]" + + $IsStableBuild = $buildInfo.stable + $AzureDevOpsProject = $buildInfo.azureDevOpsProject + $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId + $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId + } + + Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" + Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" + Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild" + + Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" + Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" + Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" + } + catch { + Write-Host $_ + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + exit 1 + } + env: + PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} diff --git a/eng/common/core-templates/steps/cleanup-microbuild.yml b/eng/common/core-templates/steps/cleanup-microbuild.yml new file mode 100644 index 000000000..c0fdcd337 --- /dev/null +++ b/eng/common/core-templates/steps/cleanup-microbuild.yml @@ -0,0 +1,28 @@ +parameters: + # Enable cleanup tasks for MicroBuild + enableMicrobuild: false + # Enable cleanup tasks for MicroBuild on Mac and Linux + # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' + enableMicrobuildForMacAndLinux: false + continueOnError: false + +steps: + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - task: MicroBuildCleanup@1 + displayName: Execute Microbuild cleanup tasks + condition: and( + always(), + or( + and( + eq(variables['Agent.Os'], 'Windows_NT'), + in(variables['_SignType'], 'real', 'test') + ), + and( + ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }}, + ne(variables['Agent.Os'], 'Windows_NT'), + eq(variables['_SignType'], 'real') + ) + )) + continueOnError: ${{ parameters.continueOnError }} + env: + TeamName: $(_TeamName) diff --git a/eng/common/core-templates/steps/component-governance.yml b/eng/common/core-templates/steps/component-governance.yml new file mode 100644 index 000000000..cf0649aa9 --- /dev/null +++ b/eng/common/core-templates/steps/component-governance.yml @@ -0,0 +1,16 @@ +parameters: + disableComponentGovernance: false + componentGovernanceIgnoreDirectories: '' + is1ESPipeline: false + displayName: 'Component Detection' + +steps: +- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: + - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true" + displayName: Set skipComponentGovernanceDetection variable +- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: + - task: ComponentGovernanceComponentDetection@0 + continueOnError: true + displayName: ${{ parameters.displayName }} + inputs: + ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} diff --git a/eng/common/core-templates/steps/enable-internal-runtimes.yml b/eng/common/core-templates/steps/enable-internal-runtimes.yml new file mode 100644 index 000000000..6bdbf62ac --- /dev/null +++ b/eng/common/core-templates/steps/enable-internal-runtimes.yml @@ -0,0 +1,32 @@ +# Obtains internal runtime download 
credentials and populates the 'dotnetbuilds-internal-container-read-token-base64' +# variable with the base64-encoded SAS token, by default + +parameters: +- name: federatedServiceConnection + type: string + default: 'dotnetbuilds-internal-read' +- name: outputVariableName + type: string + default: 'dotnetbuilds-internal-container-read-token-base64' +- name: expiryInHours + type: number + default: 1 +- name: base64Encode + type: boolean + default: true +- name: is1ESPipeline + type: boolean + default: false + +steps: +- ${{ if ne(variables['System.TeamProject'], 'public') }}: + - template: /eng/common/core-templates/steps/get-delegation-sas.yml + parameters: + federatedServiceConnection: ${{ parameters.federatedServiceConnection }} + outputVariableName: ${{ parameters.outputVariableName }} + expiryInHours: ${{ parameters.expiryInHours }} + base64Encode: ${{ parameters.base64Encode }} + storageAccount: dotnetbuilds + container: internal + permissions: rl + is1ESPipeline: ${{ parameters.is1ESPipeline }} \ No newline at end of file diff --git a/eng/common/core-templates/steps/enable-internal-sources.yml b/eng/common/core-templates/steps/enable-internal-sources.yml new file mode 100644 index 000000000..64f881bff --- /dev/null +++ b/eng/common/core-templates/steps/enable-internal-sources.yml @@ -0,0 +1,47 @@ +parameters: +# This is the Azure federated service connection that we log into to get an access token. +- name: nugetFederatedServiceConnection + type: string + default: 'dnceng-artifacts-feeds-read' +- name: is1ESPipeline + type: boolean + default: false +# Legacy parameters to allow for PAT usage +- name: legacyCredential + type: string + default: '' + +steps: +- ${{ if ne(variables['System.TeamProject'], 'public') }}: + - ${{ if ne(parameters.legacyCredential, '') }}: + - task: PowerShell@2 + displayName: Setup Internal Feeds + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 + arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token + env: + Token: ${{ parameters.legacyCredential }} + # If running on dnceng (internal project), just use the default behavior for NuGetAuthenticate. + # If running on DevDiv, NuGetAuthenticate is not really an option. It's scoped to a single feed, and we have many feeds that + # may be added. Instead, we'll use the traditional approach (add cred to nuget.config), but use an account token. + - ${{ else }}: + - ${{ if eq(variables['System.TeamProject'], 'internal') }}: + - task: PowerShell@2 + displayName: Setup Internal Feeds + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 + arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config + - ${{ else }}: + - template: /eng/common/templates/steps/get-federated-access-token.yml + parameters: + federatedServiceConnection: ${{ parameters.nugetFederatedServiceConnection }} + outputVariableName: 'dnceng-artifacts-feeds-read-access-token' + - task: PowerShell@2 + displayName: Setup Internal Feeds + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 + arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token) + # This is required in certain scenarios to install the ADO credential provider. + # It installed by default in some msbuild invocations (e.g. VS msbuild), but needs to be installed for others + # (e.g. dotnet msbuild). 
+ - task: NuGetAuthenticate@1 diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml new file mode 100644 index 000000000..d938b60e1 --- /dev/null +++ b/eng/common/core-templates/steps/generate-sbom.yml @@ -0,0 +1,54 @@ +# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. +# PackageName - The name of the package this SBOM represents. +# PackageVersion - The version of the package this SBOM represents. +# ManifestDirPath - The path of the directory where the generated manifest files will be placed +# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. + +parameters: + PackageVersion: 9.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + PackageName: '.NET' + ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom + IgnoreDirectories: '' + sbomContinueOnError: true + is1ESPipeline: false + # disable publishArtifacts if some other step is publishing the artifacts (like job.yml). + publishArtifacts: true + +steps: +- task: PowerShell@2 + displayName: Prep for SBOM generation in (Non-linux) + condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) + inputs: + filePath: ./eng/common/generate-sbom-prep.ps1 + arguments: ${{parameters.manifestDirPath}} + +# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 +- script: | + chmod +x ./eng/common/generate-sbom-prep.sh + ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} + displayName: Prep for SBOM generation in (Linux) + condition: eq(variables['Agent.Os'], 'Linux') + continueOnError: ${{ parameters.sbomContinueOnError }} + +- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 + displayName: 'Generate SBOM manifest' + continueOnError: ${{ parameters.sbomContinueOnError }} + inputs: + PackageName: ${{ parameters.packageName }} + BuildDropPath: ${{ parameters.buildDropPath }} + PackageVersion: ${{ parameters.packageVersion }} + ManifestDirPath: ${{ parameters.manifestDirPath }} + ${{ if ne(parameters.IgnoreDirectories, '') }}: + AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' + +- ${{ if eq(parameters.publishArtifacts, 'true')}}: + - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish SBOM manifest + continueOnError: ${{parameters.sbomContinueOnError}} + targetPath: '${{ parameters.manifestDirPath }}' + artifactName: $(ARTIFACT_NAME) + diff --git a/eng/common/core-templates/steps/get-delegation-sas.yml b/eng/common/core-templates/steps/get-delegation-sas.yml new file mode 100644 index 000000000..d2901470a --- /dev/null +++ b/eng/common/core-templates/steps/get-delegation-sas.yml @@ -0,0 +1,46 @@ +parameters: +- name: federatedServiceConnection + type: string +- name: outputVariableName + type: string +- name: expiryInHours + type: number + default: 1 +- name: base64Encode + type: boolean + default: false +- name: storageAccount + type: string +- name: container + type: string +- name: permissions + type: string + default: 'rl' +- name: is1ESPipeline + type: boolean + default: false + +steps: +- task: AzureCLI@2 + displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}' + inputs: + azureSubscription: ${{ parameters.federatedServiceConnection }} 
+ scriptType: 'pscore' + scriptLocation: 'inlineScript' + inlineScript: | + # Calculate the expiration of the SAS token and convert to UTC + $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ") + + $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv + + if ($LASTEXITCODE -ne 0) { + Write-Error "Failed to generate SAS token." + exit 1 + } + + if ('${{ parameters.base64Encode }}' -eq 'true') { + $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas)) + } + + Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value" + Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas" diff --git a/eng/common/core-templates/steps/get-federated-access-token.yml b/eng/common/core-templates/steps/get-federated-access-token.yml new file mode 100644 index 000000000..3a4d4410c --- /dev/null +++ b/eng/common/core-templates/steps/get-federated-access-token.yml @@ -0,0 +1,42 @@ +parameters: +- name: federatedServiceConnection + type: string +- name: outputVariableName + type: string +- name: is1ESPipeline + type: boolean +- name: stepName + type: string + default: 'getFederatedAccessToken' +- name: condition + type: string + default: '' +# Resource to get a token for. Common values include: +# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps +# - 'https://storage.azure.com/' for storage +# Defaults to Azure DevOps +- name: resource + type: string + default: '499b84ac-1321-427f-aa17-267ca6975798' +- name: isStepOutputVariable + type: boolean + default: false + +steps: +- task: AzureCLI@2 + displayName: 'Getting federated access token for feeds' + name: ${{ parameters.stepName }} + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + inputs: + azureSubscription: ${{ parameters.federatedServiceConnection }} + scriptType: 'pscore' + scriptLocation: 'inlineScript' + inlineScript: | + $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv + if ($LASTEXITCODE -ne 0) { + Write-Error "Failed to get access token for resource '${{ parameters.resource }}'" + exit 1 + } + Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value" + Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken" \ No newline at end of file diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml new file mode 100644 index 000000000..9abe726e5 --- /dev/null +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -0,0 +1,43 @@ +parameters: + # Enable cleanup tasks for MicroBuild + enableMicrobuild: false + # Enable cleanup tasks for MicroBuild on Mac and Linux + # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' + enableMicrobuildForMacAndLinux: false + continueOnError: false + +steps: + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + # Remove Python downgrade with https://github.com/dotnet/arcade/issues/15151 + - ${{ if and(eq(parameters.enableMicrobuildForMacAndLinux, 'true'), ne(variables['Agent.Os'], 'Windows_NT')) }}: + - task: UsePythonVersion@0 + displayName: 'Use Python 3.11.x' + inputs: + 
versionSpec: '3.11.x' + + - task: MicroBuildSigningPlugin@4 + displayName: Install MicroBuild plugin + inputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + ${{ if and(eq(parameters.enableMicrobuildForMacAndLinux, 'true'), ne(variables['Agent.Os'], 'Windows_NT')) }}: + azureSubscription: 'MicroBuild Signing Task (DevDiv)' + env: + TeamName: $(_TeamName) + MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)' + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + continueOnError: ${{ parameters.continueOnError }} + condition: and( + succeeded(), + or( + and( + eq(variables['Agent.Os'], 'Windows_NT'), + in(variables['_SignType'], 'real', 'test') + ), + and( + ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }}, + ne(variables['Agent.Os'], 'Windows_NT'), + eq(variables['_SignType'], 'real') + ) + )) diff --git a/eng/common/core-templates/steps/publish-build-artifacts.yml b/eng/common/core-templates/steps/publish-build-artifacts.yml new file mode 100644 index 000000000..f24ce3466 --- /dev/null +++ b/eng/common/core-templates/steps/publish-build-artifacts.yml @@ -0,0 +1,20 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false +- name: args + type: object + default: {} +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - template: /eng/common/templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + ${{ each parameter in parameters.args }}: + ${{ parameter.key }}: ${{ parameter.value }} +- ${{ else }}: + - template: /eng/common/templates-official/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + ${{ each parameter in parameters.args }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml new file mode 100644 index 000000000..de24d0087 --- /dev/null +++ b/eng/common/core-templates/steps/publish-logs.yml @@ -0,0 +1,61 @@ +parameters: + StageLabel: '' + JobLabel: '' + CustomSensitiveDataList: '' + # A default - in case value from eng/common/core-templates/post-build/common-variables.yml is not passed + BinlogToolVersion: '1.0.11' + is1ESPipeline: false + +steps: +- task: Powershell@2 + displayName: Prepare Binlogs to Upload + inputs: + targetType: inline + script: | + New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + continueOnError: true + condition: always() + +- task: PowerShell@2 + displayName: Redact Logs + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1 + # For now this needs to have explicit list of all sensitive data. 
Taken from eng/publishing/v3/publish.yml + # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' + # If the file exists - sensitive data for redaction will be sourced from it + # (single entry per line, lines starting with '# ' are considered comments and skipped) + arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs' + -BinlogToolVersion ${{parameters.BinlogToolVersion}} + -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' + '$(publishing-dnceng-devdiv-code-r-build-re)' + '$(MaestroAccessToken)' + '$(dn-bot-all-orgs-artifact-feeds-rw)' + '$(akams-client-id)' + '$(microsoft-symbol-server-pat)' + '$(symweb-symbol-server-pat)' + '$(dnceng-symbol-server-pat)' + '$(dn-bot-all-orgs-build-rw-code-rw)' + '$(System.AccessToken)' + ${{parameters.CustomSensitiveDataList}} + continueOnError: true + condition: always() + +- task: CopyFiles@2 + displayName: Gather post build logs + inputs: + SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs' + condition: always() + +- template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish Logs + pathToPublish: '$(Build.ArtifactStagingDirectory)/PostBuildLogs' + publishLocation: Container + artifactName: PostBuildLogs + continueOnError: true + condition: always() diff --git a/eng/common/core-templates/steps/publish-pipeline-artifacts.yml b/eng/common/core-templates/steps/publish-pipeline-artifacts.yml new file mode 100644 index 000000000..2efec04dc --- /dev/null +++ b/eng/common/core-templates/steps/publish-pipeline-artifacts.yml @@ -0,0 +1,20 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false + +- name: args + type: object + default: {} + +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - template: /eng/common/templates/steps/publish-pipeline-artifacts.yml + parameters: + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} +- ${{ else }}: + - template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml + parameters: + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/core-templates/steps/retain-build.yml b/eng/common/core-templates/steps/retain-build.yml new file mode 100644 index 000000000..83d97a26a --- /dev/null +++ b/eng/common/core-templates/steps/retain-build.yml @@ -0,0 +1,28 @@ +parameters: + # Optional azure devops PAT with build execute permissions for the build's organization, + # only needed if the build that should be retained ran on a different organization than + # the pipeline where this template is executing from + Token: '' + # Optional BuildId to retain, defaults to the current running build + BuildId: '' + # Azure devops Organization URI for the build in the https://dev.azure.com/ format. + # Defaults to the organization the current pipeline is running on + AzdoOrgUri: '$(System.CollectionUri)' + # Azure devops project for the build. 
Defaults to the project the current pipeline is running on + AzdoProject: '$(System.TeamProject)' + +steps: + - task: powershell@2 + inputs: + targetType: 'filePath' + filePath: eng/common/retain-build.ps1 + pwsh: true + arguments: > + -AzdoOrgUri: ${{parameters.AzdoOrgUri}} + -AzdoProject ${{parameters.AzdoProject}} + -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }} + -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}} + displayName: Enable permanent build retention + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + BUILD_ID: $(Build.BuildId) \ No newline at end of file diff --git a/eng/common/core-templates/steps/send-to-helix.yml b/eng/common/core-templates/steps/send-to-helix.yml new file mode 100644 index 000000000..68fa739c4 --- /dev/null +++ b/eng/common/core-templates/steps/send-to-helix.yml @@ -0,0 +1,93 @@ +# Please remember to update the documentation if you make changes to these parameters! +parameters: + HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ + HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' + HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number + HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues + HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group + HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY + HelixProjectArguments: '' # optional -- arguments passed to the build command + HelixConfiguration: '' # optional -- additional property attached to a job + HelixPreCommands: '' # optional -- commands to run before Helix work item execution + HelixPostCommands: '' # optional -- commands to run after Helix work item execution + WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects + WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects + WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 
00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects + CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload + XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true + XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects + XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects + XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner + XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects + IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion + DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." + IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set + HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) + Creator: '' # optional -- if the build is external, use this to specify who is sending the job + DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO + condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() + continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false + +steps: + - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' + displayName: ${{ parameters.DisplayNamePrefix }} (Windows) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + 
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog + displayName: ${{ parameters.DisplayNamePrefix }} (Unix) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml new file mode 100644 index 000000000..4da05afe0 --- /dev/null +++ b/eng/common/core-templates/steps/source-build.yml @@ -0,0 +1,133 @@ +parameters: + # This template adds arcade-powered source-build to CI. + + # This is a 'steps' template, and is intended for advanced scenarios where the existing build + # infra has a careful build methodology that must be followed. For example, a repo + # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline + # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to + # GitHub. Using this steps template leaves room for that infra to be included. + + # Defines the platform on which to run the steps. See 'eng/common/core-templates/job/source-build.yml' + # for details. 
The entire object is described in the 'job' template for simplicity, even though + # the usage of the properties on this object is split between the 'job' and 'steps' templates. + platform: {} + is1ESPipeline: false + +steps: +# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.) +- script: | + set -x + df -h + + # If file changes are detected, set CopyWipIntoInnerSourceBuildRepo to copy the WIP changes into the inner source build repo. + internalRestoreArgs= + if ! git diff --quiet; then + internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' + # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. + # This only works if there is a username/email configured, which won't be the case in most CI runs. + git config --get user.email + if [ $? -ne 0 ]; then + git config user.email dn-bot@microsoft.com + git config user.name dn-bot + fi + fi + + # If building on the internal project, the internal storage variable may be available (usually only if needed) + # In that case, add variables to allow the download of internal runtimes if the specified versions are not found + # in the default public locations. + internalRuntimeDownloadArgs= + if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then + internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' + fi + + buildConfig=Release + # Check if AzDO substitutes in a build config from a variable, and use it if so. 
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then + buildConfig='$(_BuildConfig)' + fi + + officialBuildArgs= + if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then + officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' + fi + + targetRidArgs= + if [ '${{ parameters.platform.targetRID }}' != '' ]; then + targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' + fi + + runtimeOsArgs= + if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then + runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' + fi + + baseOsArgs= + if [ '${{ parameters.platform.baseOS }}' != '' ]; then + baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' + fi + + publishArgs= + if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then + publishArgs='--publish' + fi + + assetManifestFileName=SourceBuild_RidSpecific.xml + if [ '${{ parameters.platform.name }}' != '' ]; then + assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml + fi + + portableBuildArgs= + if [ '${{ parameters.platform.portableBuild }}' != '' ]; then + portableBuildArgs='/p:PortabelBuild=${{ parameters.platform.portableBuild }}' + fi + + ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ + --configuration $buildConfig \ + --restore --build --pack $publishArgs -bl \ + $officialBuildArgs \ + $internalRuntimeDownloadArgs \ + $internalRestoreArgs \ + $targetRidArgs \ + $runtimeOsArgs \ + $baseOsArgs \ + $portableBuildArgs \ + /p:DotNetBuildSourceOnly=true \ + /p:DotNetBuildRepo=true \ + /p:AssetManifestFileName=$assetManifestFileName + displayName: Build + +# Upload build logs for diagnosis. +- task: CopyFiles@2 + displayName: Prepare BuildLogs staging directory + inputs: + SourceFolder: '$(Build.SourcesDirectory)' + Contents: | + **/*.log + **/*.binlog + artifacts/sb/prebuilt-report/** + TargetFolder: '$(Build.StagingDirectory)/BuildLogs' + CleanTargetFolder: true + continueOnError: true + condition: succeededOrFailed() + +- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish BuildLogs + targetPath: '$(Build.StagingDirectory)/BuildLogs' + artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) + continueOnError: true + condition: succeededOrFailed() + sbomEnabled: false # we don't need SBOM for logs + +# Manually inject component detection so that we can ignore the source build upstream cache, which contains +# a nupkg cache of input packages (a local feed). 
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir' +# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets +- template: /eng/common/core-templates/steps/component-governance.yml + parameters: + displayName: Component Detection (Exclude upstream cache) + is1ESPipeline: ${{ parameters.is1ESPipeline }} + componentGovernanceIgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache' + disableComponentGovernance: ${{ eq(variables['System.TeamProject'], 'public') }} diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml new file mode 100644 index 000000000..473a22c47 --- /dev/null +++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml @@ -0,0 +1,35 @@ +parameters: + sourceIndexUploadPackageVersion: 2.0.0-20240522.1 + sourceIndexProcessBinlogPackageVersion: 1.0.1-20240522.1 + sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json + binlogPath: artifacts/log/Debug/Build.binlog + +steps: +- task: UseDotNet@2 + displayName: "Source Index: Use .NET 8 SDK" + inputs: + packageType: sdk + version: 8.0.x + installationPath: $(Agent.TempDirectory)/dotnet + workingDirectory: $(Agent.TempDirectory) + +- script: | + $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools + displayName: "Source Index: Download netsourceindex Tools" + # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. 
+ workingDirectory: $(Agent.TempDirectory) + +- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.BinlogPath}} -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output + displayName: "Source Index: Process Binlog into indexable sln" + +- ${{ if and(ne(parameters.runAsPublic, 'true'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: AzureCLI@2 + displayName: "Source Index: Upload Source Index stage1 artifacts to Azure" + inputs: + azureSubscription: 'SourceDotNet Stage1 Publish' + addSpnToEnvironment: true + scriptType: 'ps' + scriptLocation: 'inlineScript' + inlineScript: | + $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1 diff --git a/eng/common/core-templates/variables/pool-providers.yml b/eng/common/core-templates/variables/pool-providers.yml new file mode 100644 index 000000000..41053d382 --- /dev/null +++ b/eng/common/core-templates/variables/pool-providers.yml @@ -0,0 +1,8 @@ +parameters: + is1ESPipeline: false + +variables: + - ${{ if eq(parameters.is1ESPipeline, 'true') }}: + - template: /eng/common/templates-official/variables/pool-providers.yml + - ${{ else }}: + - template: /eng/common/templates/variables/pool-providers.yml \ No newline at end of file diff --git a/eng/common/cross/arm/sources.list.bionic b/eng/common/cross/arm/sources.list.bionic deleted file mode 100644 index 210955740..000000000 --- a/eng/common/cross/arm/sources.list.bionic +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.focal b/eng/common/cross/arm/sources.list.focal deleted file mode 100644 index 4de2600c1..000000000 --- a/eng/common/cross/arm/sources.list.focal +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.jammy b/eng/common/cross/arm/sources.list.jammy deleted file mode 100644 index 6bb045302..000000000 --- a/eng/common/cross/arm/sources.list.jammy +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ 
jammy main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.jessie b/eng/common/cross/arm/sources.list.jessie deleted file mode 100644 index 4d142ac9b..000000000 --- a/eng/common/cross/arm/sources.list.jessie +++ /dev/null @@ -1,3 +0,0 @@ -# Debian (sid) # UNSTABLE -deb http://ftp.debian.org/debian/ sid main contrib non-free -deb-src http://ftp.debian.org/debian/ sid main contrib non-free diff --git a/eng/common/cross/arm/sources.list.xenial b/eng/common/cross/arm/sources.list.xenial deleted file mode 100644 index 56fbb36a5..000000000 --- a/eng/common/cross/arm/sources.list.xenial +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse diff --git a/eng/common/cross/arm/sources.list.zesty b/eng/common/cross/arm/sources.list.zesty deleted file mode 100644 index ea2c14a78..000000000 --- a/eng/common/cross/arm/sources.list.zesty +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.bionic b/eng/common/cross/arm64/sources.list.bionic deleted file mode 100644 index 210955740..000000000 --- a/eng/common/cross/arm64/sources.list.bionic +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse 
-deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.buster b/eng/common/cross/arm64/sources.list.buster deleted file mode 100644 index 7194ac64a..000000000 --- a/eng/common/cross/arm64/sources.list.buster +++ /dev/null @@ -1,11 +0,0 @@ -deb http://deb.debian.org/debian buster main -deb-src http://deb.debian.org/debian buster main - -deb http://deb.debian.org/debian-security/ buster/updates main -deb-src http://deb.debian.org/debian-security/ buster/updates main - -deb http://deb.debian.org/debian buster-updates main -deb-src http://deb.debian.org/debian buster-updates main - -deb http://deb.debian.org/debian buster-backports main contrib non-free -deb-src http://deb.debian.org/debian buster-backports main contrib non-free diff --git a/eng/common/cross/arm64/sources.list.focal b/eng/common/cross/arm64/sources.list.focal deleted file mode 100644 index 4de2600c1..000000000 --- a/eng/common/cross/arm64/sources.list.focal +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.jammy b/eng/common/cross/arm64/sources.list.jammy deleted file mode 100644 index 6bb045302..000000000 --- a/eng/common/cross/arm64/sources.list.jammy +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.stretch b/eng/common/cross/arm64/sources.list.stretch deleted file mode 100644 index 0e1215774..000000000 --- a/eng/common/cross/arm64/sources.list.stretch +++ /dev/null @@ -1,12 +0,0 @@ -deb http://deb.debian.org/debian stretch main -deb-src http://deb.debian.org/debian stretch main - -deb http://deb.debian.org/debian-security/ stretch/updates main -deb-src http://deb.debian.org/debian-security/ stretch/updates main - -deb http://deb.debian.org/debian stretch-updates main -deb-src http://deb.debian.org/debian stretch-updates main - -deb http://deb.debian.org/debian stretch-backports main contrib non-free -deb-src http://deb.debian.org/debian stretch-backports main contrib non-free - diff --git a/eng/common/cross/arm64/sources.list.xenial b/eng/common/cross/arm64/sources.list.xenial deleted file mode 100644 index 56fbb36a5..000000000 --- 
a/eng/common/cross/arm64/sources.list.xenial +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse diff --git a/eng/common/cross/arm64/sources.list.zesty b/eng/common/cross/arm64/sources.list.zesty deleted file mode 100644 index ea2c14a78..000000000 --- a/eng/common/cross/arm64/sources.list.zesty +++ /dev/null @@ -1,11 +0,0 @@ -deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe -deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe - -deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted -deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted - -deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse -deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse diff --git a/eng/common/cross/armel/sources.list.jessie b/eng/common/cross/armel/sources.list.jessie deleted file mode 100644 index 3d9c3059d..000000000 --- a/eng/common/cross/armel/sources.list.jessie +++ /dev/null @@ -1,3 +0,0 @@ -# Debian (jessie) # Stable -deb http://ftp.debian.org/debian/ jessie main contrib non-free -deb-src http://ftp.debian.org/debian/ jessie main contrib non-free diff --git a/eng/common/cross/armv6/sources.list.buster b/eng/common/cross/armv6/sources.list.buster deleted file mode 100644 index f27fc4fb3..000000000 --- a/eng/common/cross/armv6/sources.list.buster +++ /dev/null @@ -1,2 +0,0 @@ -deb http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi -deb-src http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh index f163fb9da..7e9ba2b75 100644 --- a/eng/common/cross/build-android-rootfs.sh +++ b/eng/common/cross/build-android-rootfs.sh @@ -5,15 +5,15 @@ __NDK_Version=r21 usage() { echo "Creates a toolchain and sysroot used for cross-compiling for Android." - echo. + echo echo "Usage: $0 [BuildArch] [ApiLevel]" - echo. + echo echo "BuildArch is the target architecture of Android. Currently only arm64 is supported." echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html" - echo. + echo echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior" echo "by setting the TOOLCHAIN_DIR environment variable" - echo. + echo echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. 
If you already have an NDK installation," echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK." echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android." diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index 99a9dd6bb..20ae8c286 100644 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -30,7 +30,8 @@ __IllumosArch=arm7 __HaikuArch=arm __QEMUArch=arm __UbuntuArch=armhf -__UbuntuRepo="http://ports.ubuntu.com/" +__UbuntuRepo= +__UbuntuSuites="updates security backports" __LLDB_Package="liblldb-3.9-dev" __SkipUnmount=0 @@ -65,13 +66,14 @@ __UbuntuPackages+=" libcurl4-openssl-dev" __UbuntuPackages+=" libkrb5-dev" __UbuntuPackages+=" libssl-dev" __UbuntuPackages+=" zlib1g-dev" +__UbuntuPackages+=" libbrotli-dev" __AlpinePackages+=" curl-dev" __AlpinePackages+=" krb5-dev" __AlpinePackages+=" openssl-dev" __AlpinePackages+=" zlib-dev" -__FreeBSDBase="13.2-RELEASE" +__FreeBSDBase="13.3-RELEASE" __FreeBSDPkg="1.17.0" __FreeBSDABI="13" __FreeBSDPackages="libunwind" @@ -90,18 +92,18 @@ __HaikuPackages="gcc_syslibs" __HaikuPackages+=" gcc_syslibs_devel" __HaikuPackages+=" gmp" __HaikuPackages+=" gmp_devel" -__HaikuPackages+=" icu66" -__HaikuPackages+=" icu66_devel" +__HaikuPackages+=" icu[0-9]+" +__HaikuPackages+=" icu[0-9]*_devel" __HaikuPackages+=" krb5" __HaikuPackages+=" krb5_devel" __HaikuPackages+=" libiconv" __HaikuPackages+=" libiconv_devel" -__HaikuPackages+=" llvm12_libunwind" -__HaikuPackages+=" llvm12_libunwind_devel" +__HaikuPackages+=" llvm[0-9]*_libunwind" +__HaikuPackages+=" llvm[0-9]*_libunwind_devel" __HaikuPackages+=" mpfr" __HaikuPackages+=" mpfr_devel" -__HaikuPackages+=" openssl" -__HaikuPackages+=" openssl_devel" +__HaikuPackages+=" openssl3" +__HaikuPackages+=" openssl3_devel" __HaikuPackages+=" zlib" __HaikuPackages+=" zlib_devel" @@ -129,6 +131,7 @@ __AlpineKeys=' 616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ== ' __Keyring= +__KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg" __SkipSigCheck=0 __UseMirror=0 @@ -142,7 +145,6 @@ while :; do case $lowerI in -\?|-h|--help) usage - exit 1 ;; arm) __BuildArch=arm @@ -163,6 +165,7 @@ while :; do __UbuntuArch=armel __UbuntuRepo="http://ftp.debian.org/debian/" __CodeName=jessie + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" ;; armv6) __BuildArch=armv6 @@ -170,10 +173,12 @@ while :; do __QEMUArch=arm __UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/" __CodeName=buster + __KeyringFile="/usr/share/keyrings/raspbian-archive-keyring.gpg" __LLDB_Package="liblldb-6.0-dev" + 
__UbuntuSuites= - if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then - __Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg" + if [[ -e "$__KeyringFile" ]]; then + __Keyring="--keyring $__KeyringFile" fi ;; riscv64) @@ -182,13 +187,8 @@ while :; do __AlpinePackages="${__AlpinePackages// lldb-dev/}" __QEMUArch=riscv64 __UbuntuArch=riscv64 - __UbuntuRepo="http://deb.debian.org/debian" __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" unset __LLDB_Package - - if [[ -e "/usr/share/keyrings/debian-archive-keyring.gpg" ]]; then - __Keyring="--keyring /usr/share/keyrings/debian-archive-keyring.gpg --include=debian-archive-keyring" - fi ;; ppc64le) __BuildArch=ppc64le @@ -229,12 +229,19 @@ while :; do __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" ;; lldb*) - version="${lowerI/lldb/}" - parts=(${version//./ }) + version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" + majorVersion="${version%%.*}" + + [ -z "${version##*.*}" ] && minorVersion="${version#*.}" + if [ -z "$minorVersion" ]; then + minorVersion=0 + fi # for versions > 6.0, lldb has dropped the minor version - if [[ "${parts[0]}" -gt 6 ]]; then - version="${parts[0]}" + if [ "$majorVersion" -le 6 ]; then + version="$majorVersion.$minorVersion" + else + version="$majorVersion" fi __LLDB_Package="liblldb-${version}-dev" @@ -243,15 +250,19 @@ while :; do unset __LLDB_Package ;; llvm*) - version="${lowerI/llvm/}" - parts=(${version//./ }) - __LLVM_MajorVersion="${parts[0]}" - __LLVM_MinorVersion="${parts[1]}" - - # for versions > 6.0, llvm has dropped the minor version - if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then - __LLVM_MinorVersion=0; + version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" + __LLVM_MajorVersion="${version%%.*}" + + [ -z "${version##*.*}" ] && __LLVM_MinorVersion="${version#*.}" + if [ -z "$__LLVM_MinorVersion" ]; then + __LLVM_MinorVersion=0 + fi + + # for versions > 6.0, lldb has dropped the minor version + if [ "$__LLVM_MajorVersion" -gt 6 ]; then + __LLVM_MinorVersion= fi + ;; xenial) # Ubuntu 16.04 if [[ "$__CodeName" != "jessie" ]]; then @@ -278,8 +289,17 @@ while :; do __CodeName=jammy fi ;; + noble) # Ubuntu 24.04 + if [[ "$__CodeName" != "jessie" ]]; then + __CodeName=noble + fi + if [[ -n "$__LLDB_Package" ]]; then + __LLDB_Package="liblldb-18-dev" + fi + ;; jessie) # Debian 8 __CodeName=jessie + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then __UbuntuRepo="http://ftp.debian.org/debian/" @@ -288,6 +308,7 @@ while :; do stretch) # Debian 9 __CodeName=stretch __LLDB_Package="liblldb-6.0-dev" + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then __UbuntuRepo="http://ftp.debian.org/debian/" @@ -296,6 +317,7 @@ while :; do buster) # Debian 10 __CodeName=buster __LLDB_Package="liblldb-6.0-dev" + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then __UbuntuRepo="http://ftp.debian.org/debian/" @@ -303,6 +325,15 @@ while :; do ;; bullseye) # Debian 11 __CodeName=bullseye + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + bookworm) # Debian 12 + __CodeName=bookworm + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then __UbuntuRepo="http://ftp.debian.org/debian/" @@ -310,6 +341,7 @@ while :; do ;; sid) # Debian sid __CodeName=sid + 
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then __UbuntuRepo="http://ftp.debian.org/debian/" @@ -323,15 +355,14 @@ while :; do alpine*) __CodeName=alpine __UbuntuRepo= - version="${lowerI/alpine/}" - if [[ "$version" == "edge" ]]; then + if [[ "$lowerI" == "alpineedge" ]]; then __AlpineVersion=edge else - parts=(${version//./ }) - __AlpineMajorVersion="${parts[0]}" - __AlpineMinoVersion="${parts[1]}" - __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion" + version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" + __AlpineMajorVersion="${version%%.*}" + __AlpineMinorVersion="${version#*.}" + __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinorVersion" fi ;; freebsd13) @@ -420,6 +451,10 @@ fi __UbuntuPackages+=" ${__LLDB_Package:-}" +if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ports.ubuntu.com/" +fi + if [[ -n "$__LLVM_MajorVersion" ]]; then __UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev" fi @@ -442,13 +477,39 @@ fi mkdir -p "$__RootfsDir" __RootfsDir="$( cd "$__RootfsDir" && pwd )" +__hasWget= +ensureDownloadTool() +{ + if command -v wget &> /dev/null; then + __hasWget=1 + elif command -v curl &> /dev/null; then + __hasWget=0 + else + >&2 echo "ERROR: either wget or curl is required by this script." + exit 1 + fi +} + if [[ "$__CodeName" == "alpine" ]]; then __ApkToolsVersion=2.12.11 - __ApkToolsSHA512SUM=53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33 __ApkToolsDir="$(mktemp -d)" __ApkKeysDir="$(mktemp -d)" + arch="$(uname -m)" - wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir" + ensureDownloadTool + + if [[ "$__hasWget" == 1 ]]; then + wget -P "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static" + else + curl -SLO --create-dirs --output-dir "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static" + fi + if [[ "$arch" == "x86_64" ]]; then + __ApkToolsSHA512SUM="53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33" + elif [[ "$arch" == "aarch64" ]]; then + __ApkToolsSHA512SUM="9e2b37ecb2b56c05dad23d379be84fd494c14bd730b620d0d576bda760588e1f2f59a7fcb2f2080577e0085f23a0ca8eadd993b4e61c2ab29549fdb71969afd0" + else + echo "WARNING: add missing hash for your host architecture. 
To find the value, use: 'find /tmp -name apk.static -exec sha512sum {} \;'" + fi echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c chmod +x "$__ApkToolsDir/apk.static" @@ -477,12 +538,14 @@ if [[ "$__CodeName" == "alpine" ]]; then fi # initialize DB + # shellcheck disable=SC2086 "$__ApkToolsDir/apk.static" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then + # shellcheck disable=SC2086 __AlpinePackages+=" $("$__ApkToolsDir/apk.static" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ @@ -491,6 +554,7 @@ if [[ "$__CodeName" == "alpine" ]]; then fi # install all packages in one go + # shellcheck disable=SC2086 "$__ApkToolsDir/apk.static" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ @@ -501,12 +565,23 @@ if [[ "$__CodeName" == "alpine" ]]; then elif [[ "$__CodeName" == "freebsd" ]]; then mkdir -p "$__RootfsDir"/usr/local/etc JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} - wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version + + ensureDownloadTool + + if [[ "$__hasWget" == 1 ]]; then + wget -O- "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version + else + curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version + fi echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf mkdir -p "$__RootfsDir"/tmp # get and build package manager - wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - + if [[ "$__hasWget" == 1 ]]; then + wget -O- "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - + else + curl -SL "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - + fi cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" # needed for install to succeed mkdir -p "$__RootfsDir"/host/etc @@ -514,27 +589,43 @@ elif [[ "$__CodeName" == "freebsd" ]]; then rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" # install packages we need. 
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update + # shellcheck disable=SC2086 INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages elif [[ "$__CodeName" == "illumos" ]]; then mkdir "$__RootfsDir/tmp" pushd "$__RootfsDir/tmp" JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + + ensureDownloadTool + echo "Downloading sysroot." - wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - + if [[ "$__hasWget" == 1 ]]; then + wget -O- https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - + else + curl -SL https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - + fi echo "Building binutils. Please wait.." - wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf - + if [[ "$__hasWget" == 1 ]]; then + wget -O- https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf - + else + curl -SL https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf - + fi mkdir build-binutils && cd build-binutils - ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" + ../binutils-2.42/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" make -j "$JOBS" && make install && cd .. echo "Building gcc. Please wait.." - wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf - + if [[ "$__hasWget" == 1 ]]; then + wget -O- https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf - + else + curl -SL https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf - + fi CFLAGS="-fPIC" CXXFLAGS="-fPIC" CXXFLAGS_FOR_TARGET="-fPIC" CFLAGS_FOR_TARGET="-fPIC" export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET mkdir build-gcc && cd build-gcc - ../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \ + ../gcc-13.3.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \ --with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \ --disable-libquadmath-support --disable-shared --enable-tls make -j "$JOBS" && make install && cd .. @@ -542,9 +633,13 @@ elif [[ "$__CodeName" == "illumos" ]]; then if [[ "$__UseMirror" == 1 ]]; then BaseUrl=https://pkgsrc.smartos.skylime.net fi - BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All" + BaseUrl="$BaseUrl/packages/SmartOS/2019Q4/${__illumosArch}/All" echo "Downloading manifest" - wget "$BaseUrl" + if [[ "$__hasWget" == 1 ]]; then + wget "$BaseUrl" + else + curl -SLO "$BaseUrl" + fi echo "Downloading dependencies." 
read -ra array <<<"$__IllumosPackages" for package in "${array[@]}"; do @@ -552,7 +647,11 @@ elif [[ "$__CodeName" == "illumos" ]]; then # find last occurrence of package in listing and extract its name package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)" echo "Resolved name '$package'" - wget "$BaseUrl"/"$package".tgz + if [[ "$__hasWget" == 1 ]]; then + wget "$BaseUrl"/"$package".tgz + else + curl -SLO "$BaseUrl"/"$package".tgz + fi ar -x "$package".tgz tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null done @@ -561,10 +660,17 @@ elif [[ "$__CodeName" == "illumos" ]]; then rm -rf "$__RootfsDir"/{tmp,+*} mkdir -p "$__RootfsDir"/usr/include/net mkdir -p "$__RootfsDir"/usr/include/netpacket - wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h - wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h - wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h - wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h + if [[ "$__hasWget" == 1 ]]; then + wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h + wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h + wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h + wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h + else + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h + fi elif [[ "$__CodeName" == "haiku" ]]; then JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} @@ -574,29 +680,55 @@ elif [[ "$__CodeName" == "haiku" ]]; then mkdir "$__RootfsDir/tmp/download" - echo "Downloading Haiku package tool" - git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 $__RootfsDir/tmp/script - wget -O "$__RootfsDir/tmp/download/hosttools.zip" $($__RootfsDir/tmp/script/fetch.sh --hosttools) + ensureDownloadTool + + echo "Downloading Haiku package tools" + git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script" + if [[ "$__hasWget" == 1 ]]; then + wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)" + else + curl -SLo "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)" + fi + unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin" - 
DepotBaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg" - HpkgBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current" + HaikuBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current" + HaikuPortsBaseUrl="https://eu.hpkg.haiku-os.org/haikuports/master/$__HaikuArch/current" + + echo "Downloading HaikuPorts package repository index..." + if [[ "$__hasWget" == 1 ]]; then + wget -P "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo" + else + curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo" + fi - # Download Haiku packages echo "Downloading Haiku packages" read -ra array <<<"$__HaikuPackages" for package in "${array[@]}"; do echo "Downloading $package..." - # API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60 - # The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598 - hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \ - --header='Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')" - wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" + hpkgFilename="$(LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package_repo" list -f "$__RootfsDir/tmp/download/repo" | + grep -E "${package}-" | sort -V | tail -n 1 | xargs)" + if [ -z "$hpkgFilename" ]; then + >&2 echo "ERROR: package $package missing." + exit 1 + fi + echo "Resolved filename: $hpkgFilename..." + hpkgDownloadUrl="$HaikuPortsBaseUrl/packages/$hpkgFilename" + if [[ "$__hasWget" == 1 ]]; then + wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" + else + curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" + fi done for package in haiku haiku_devel; do echo "Downloading $package..." 
- hpkgVersion="$(wget -qO- $HpkgBaseUrl | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" - wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" + if [[ "$__hasWget" == 1 ]]; then + hpkgVersion="$(wget -qO- "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" + wget -P "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" + else + hpkgVersion="$(curl -sSL "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" + curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" + fi done # Set up the sysroot @@ -609,7 +741,11 @@ elif [[ "$__CodeName" == "haiku" ]]; then # Download buildtools echo "Downloading Haiku buildtools" - wget -O "$__RootfsDir/tmp/download/buildtools.zip" $($__RootfsDir/tmp/script/fetch.sh --buildtools --arch=$__HaikuArch) + if [[ "$__hasWget" == 1 ]]; then + wget -O "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)" + else + curl -SLo "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)" + fi unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir" # Cleaning up temporary files @@ -622,10 +758,22 @@ elif [[ -n "$__CodeName" ]]; then __Keyring="$__Keyring --force-check-gpg" fi + # shellcheck disable=SC2086 + echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" - cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list" + + mkdir -p "$__RootfsDir/etc/apt/sources.list.d/" + cat > "$__RootfsDir/etc/apt/sources.list.d/$__CodeName.sources" <:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>) + add_link_options($<$:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>) +endif() + +option(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC "Statically link against the C++ standard library" OFF) +if(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC) + add_link_options($<$:-static-libstdc++>) +endif() + +set(CLR_CMAKE_CXX_ABI_LIBRARY "" CACHE STRING "C++ ABI implementation library to link against. Only supported with the Clang compiler.") +if (CLR_CMAKE_CXX_ABI_LIBRARY) + # The user may specify the ABI library with the 'lib' prefix, like 'libstdc++'. Strip the prefix here so the linker finds the right library. + string(REGEX REPLACE "^lib(.+)" "\\1" CLR_CMAKE_CXX_ABI_LIBRARY ${CLR_CMAKE_CXX_ABI_LIBRARY}) + # We need to specify this as a linker-backend option as Clang will filter this option out when linking to libc++. 
+ add_link_options("LINKER:-l${CLR_CMAKE_CXX_ABI_LIBRARY}") +endif() + set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) diff --git a/eng/common/cross/x64/sources.list.bionic b/eng/common/cross/x64/sources.list.bionic deleted file mode 100644 index a71ccadcf..000000000 --- a/eng/common/cross/x64/sources.list.bionic +++ /dev/null @@ -1,11 +0,0 @@ -deb http://archive.ubuntu.com/ubuntu/ bionic main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ bionic main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted -deb-src http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted - -deb http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse -deb-src http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/x64/sources.list.xenial b/eng/common/cross/x64/sources.list.xenial deleted file mode 100644 index ad9c5a014..000000000 --- a/eng/common/cross/x64/sources.list.xenial +++ /dev/null @@ -1,11 +0,0 @@ -deb http://archive.ubuntu.com/ubuntu/ xenial main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ xenial main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted -deb-src http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted - -deb http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse -deb-src http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.bionic b/eng/common/cross/x86/sources.list.bionic deleted file mode 100644 index a71ccadcf..000000000 --- a/eng/common/cross/x86/sources.list.bionic +++ /dev/null @@ -1,11 +0,0 @@ -deb http://archive.ubuntu.com/ubuntu/ bionic main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ bionic main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted -deb-src http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted - -deb http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse -deb-src http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.focal b/eng/common/cross/x86/sources.list.focal deleted file mode 100644 index 99d573133..000000000 --- a/eng/common/cross/x86/sources.list.focal +++ /dev/null @@ -1,11 +0,0 @@ -deb http://archive.ubuntu.com/ubuntu/ focal main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ focal main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ focal-backports main restricted -deb-src http://archive.ubuntu.com/ubuntu/ focal-backports main restricted - -deb http://archive.ubuntu.com/ubuntu/ 
focal-security main restricted universe multiverse -deb-src http://archive.ubuntu.com/ubuntu/ focal-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.jammy b/eng/common/cross/x86/sources.list.jammy deleted file mode 100644 index af1c1feae..000000000 --- a/eng/common/cross/x86/sources.list.jammy +++ /dev/null @@ -1,11 +0,0 @@ -deb http://archive.ubuntu.com/ubuntu/ jammy main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ jammy main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted -deb-src http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted - -deb http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse -deb-src http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse diff --git a/eng/common/cross/x86/sources.list.xenial b/eng/common/cross/x86/sources.list.xenial deleted file mode 100644 index ad9c5a014..000000000 --- a/eng/common/cross/x86/sources.list.xenial +++ /dev/null @@ -1,11 +0,0 @@ -deb http://archive.ubuntu.com/ubuntu/ xenial main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ xenial main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe -deb-src http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe - -deb http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted -deb-src http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted - -deb http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse -deb-src http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1 index 8fda30bdc..e33743105 100644 --- a/eng/common/darc-init.ps1 +++ b/eng/common/darc-init.ps1 @@ -1,6 +1,6 @@ param ( $darcVersion = $null, - $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16', + $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20', $verbosity = 'minimal', $toolpath = $null ) diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh index c305ae6bd..36dbd45e1 100644 --- a/eng/common/darc-init.sh +++ b/eng/common/darc-init.sh @@ -2,7 +2,7 @@ source="${BASH_SOURCE[0]}" darcVersion='' -versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2019-01-16' +versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20' verbosity='minimal' while [[ $# > 0 ]]; do diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh index 7e69e3a9e..7b9d97e3b 100644 --- a/eng/common/dotnet-install.sh +++ b/eng/common/dotnet-install.sh @@ -71,6 +71,9 @@ case $cpuname in i[3-6]86) buildarch=x86 ;; + riscv64) + buildarch=riscv64 + ;; *) echo "Unknown CPU $cpuname detected, treating it as x64" buildarch=x64 @@ -82,7 +85,7 @@ if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then dotnetRoot="$dotnetRoot/$architecture" fi -InstallDotNet $dotnetRoot $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || { +InstallDotNet "$dotnetRoot" $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || { local exit_code=$? 
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2 ExitWithExitCode $exit_code diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props index a735fe9a1..f1d041c33 100644 --- a/eng/common/internal/Directory.Build.props +++ b/eng/common/internal/Directory.Build.props @@ -1,6 +1,11 @@ + + false + false + + diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj index 8fa77e5b1..32f79dfb3 100644 --- a/eng/common/internal/Tools.csproj +++ b/eng/common/internal/Tools.csproj @@ -3,8 +3,8 @@ net472 - false false + false diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1 index ca38268c4..f71f6af6c 100644 --- a/eng/common/native/CommonLibrary.psm1 +++ b/eng/common/native/CommonLibrary.psm1 @@ -277,7 +277,8 @@ function Get-MachineArchitecture { if (($ProcessorArchitecture -Eq "AMD64") -Or ($ProcessorArchitecture -Eq "IA64") -Or ($ProcessorArchitecture -Eq "ARM64") -Or - ($ProcessorArchitecture -Eq "LOONGARCH64")) { + ($ProcessorArchitecture -Eq "LOONGARCH64") -Or + ($ProcessorArchitecture -Eq "RISCV64")) { return "x64" } return "x86" diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh index afdeb7a4d..9a0e1f2b4 100644 --- a/eng/common/native/init-compiler.sh +++ b/eng/common/native/init-compiler.sh @@ -2,7 +2,9 @@ # # This file detects the C/C++ compiler and exports it to the CC/CXX environment variables # -# NOTE: some scripts source this file and rely on stdout being empty, make sure to not output anything here! +# NOTE: some scripts source this file and rely on stdout being empty, make sure +# to not output *anything* here, unless it is an error message that fails the +# build. if [ -z "$build_arch" ] || [ -z "$compiler" ]; then echo "Usage..." @@ -17,11 +19,9 @@ case "$compiler" in # clangx.y or clang-x.y version="$(echo "$compiler" | tr -d '[:alpha:]-=')" majorVersion="${version%%.*}" - [ -z "${version##*.*}" ] && minorVersion="${version#*.}" - if [ -z "$minorVersion" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -le 6 ]; then - minorVersion=0; - fi + # LLVM based on v18 released in early 2024, with two releases per year + maxVersion="$((18 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 6)))" compiler=clang ;; @@ -29,7 +29,9 @@ case "$compiler" in # gccx.y or gcc-x.y version="$(echo "$compiler" | tr -d '[:alpha:]-=')" majorVersion="${version%%.*}" - [ -z "${version##*.*}" ] && minorVersion="${version#*.}" + + # GCC based on v14 released in early 2024, with one release per year + maxVersion="$((14 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 12)))" compiler=gcc ;; esac @@ -47,91 +49,98 @@ check_version_exists() { desired_version=-1 # Set up the environment to be used for building with the desired compiler. - if command -v "$compiler-$1.$2" > /dev/null; then - desired_version="-$1.$2" - elif command -v "$compiler$1$2" > /dev/null; then - desired_version="$1$2" - elif command -v "$compiler-$1$2" > /dev/null; then - desired_version="-$1$2" + if command -v "$compiler-$1" > /dev/null; then + desired_version="-$1" + elif command -v "$compiler$1" > /dev/null; then + desired_version="$1" fi echo "$desired_version" } +__baseOS="$(uname)" +set_compiler_version_from_CC() { + if [ "$__baseOS" = "Darwin" ]; then + # On Darwin, the versions from -version/-dumpversion refer to Xcode + # versions, not llvm versions, so we can't rely on them. 
+ return + fi + + version="$("$CC" -dumpversion)" + if [ -z "$version" ]; then + echo "Error: $CC -dumpversion didn't provide a version" + exit 1 + fi + + # gcc and clang often display 3 part versions. However, gcc can show only 1 part in some environments. + IFS=. read -r majorVersion _ < /dev/null; then - if [ "$(uname)" != "Darwin" ]; then - echo "Warning: Specific version of $compiler not found, falling back to use the one in PATH." - fi - CC="$(command -v "$compiler")" - CXX="$(command -v "$cxxCompiler")" - else - echo "No usable version of $compiler found." + if ! command -v "$compiler" > /dev/null; then + echo "Error: No compatible version of $compiler was found within the range of $minVersion to $maxVersion. Please upgrade your toolchain or specify the compiler explicitly using CLR_CC and CLR_CXX environment variables." exit 1 fi - else - if [ "$compiler" = "clang" ] && [ "$majorVersion" -lt 5 ]; then - if [ "$build_arch" = "arm" ] || [ "$build_arch" = "armel" ]; then - if command -v "$compiler" > /dev/null; then - echo "Warning: Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH." - CC="$(command -v "$compiler")" - CXX="$(command -v "$cxxCompiler")" - else - echo "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH." - exit 1 - fi - fi - fi + + CC="$(command -v "$compiler" 2> /dev/null)" + CXX="$(command -v "$cxxCompiler" 2> /dev/null)" + set_compiler_version_from_CC fi else - desired_version="$(check_version_exists "$majorVersion" "$minorVersion")" + desired_version="$(check_version_exists "$majorVersion")" if [ "$desired_version" = "-1" ]; then - echo "Could not find specific version of $compiler: $majorVersion $minorVersion." + echo "Error: Could not find specific version of $compiler: $majorVersion." exit 1 fi fi if [ -z "$CC" ]; then - CC="$(command -v "$compiler$desired_version")" - CXX="$(command -v "$cxxCompiler$desired_version")" - if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler")"; fi + CC="$(command -v "$compiler$desired_version" 2> /dev/null)" + CXX="$(command -v "$cxxCompiler$desired_version" 2> /dev/null)" + if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler" 2> /dev/null)"; fi + set_compiler_version_from_CC fi else if [ ! -f "$CLR_CC" ]; then - echo "CLR_CC is set but path '$CLR_CC' does not exist" + echo "Error: CLR_CC is set but path '$CLR_CC' does not exist" exit 1 fi CC="$CLR_CC" CXX="$CLR_CXX" + set_compiler_version_from_CC fi if [ -z "$CC" ]; then - echo "Unable to find $compiler." + echo "Error: Unable to find $compiler." exit 1 fi -# Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0. -if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && ([ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]); then - if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then - LDFLAGS="-fuse-ld=lld" +if [ "$__baseOS" != "Darwin" ]; then + # On Darwin, we always want to use the Apple linker. + + # Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0. 
+ if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && { [ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]; }; then + if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then + LDFLAGS="-fuse-ld=lld" + fi fi fi -SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")" +SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version" 2> /dev/null)" export CC CXX LDFLAGS SCAN_BUILD_COMMAND diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh index 5dcbfd700..83ea7aab0 100644 --- a/eng/common/native/init-distro-rid.sh +++ b/eng/common/native/init-distro-rid.sh @@ -20,10 +20,6 @@ getNonPortableDistroRid() # shellcheck disable=SC1091 if [ -e "${rootfsDir}/etc/os-release" ]; then . "${rootfsDir}/etc/os-release" - if [ "${ID}" = "rhel" ] || [ "${ID}" = "rocky" ] || [ "${ID}" = "alpine" ]; then - VERSION_ID="${VERSION_ID%.*}" # Remove the last version digit for these distros - fi - if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then nonPortableRid="${ID}.${VERSION_ID}-${targetArch}" else @@ -48,19 +44,7 @@ getNonPortableDistroRid() nonPortableRid="android.$__android_sdk_version-${targetArch}" elif [ "$targetOs" = "illumos" ]; then __uname_version=$(uname -v) - case "$__uname_version" in - omnios-*) - __omnios_major_version=$(echo "$__uname_version" | cut -c9-10) - nonPortableRid="omnios.$__omnios_major_version-${targetArch}" - ;; - joyent_*) - __smartos_major_version=$(echo "$__uname_version" | cut -c9-10) - nonPortableRid="smartos.$__smartos_major_version-${targetArch}" - ;; - *) - nonPortableRid="illumos-${targetArch}" - ;; - esac + nonPortableRid="illumos-${targetArch}" elif [ "$targetOs" = "solaris" ]; then __uname_version=$(uname -v) __solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1) diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh new file mode 100644 index 000000000..dc396a955 --- /dev/null +++ b/eng/common/native/install-dependencies.sh @@ -0,0 +1,65 @@ +#!/bin/sh + +set -e + +# This is a simple script primarily used for CI to install necessary dependencies +# +# Usage: +# +# ./install-dependencies.sh + +os="$(echo "$1" | tr "[:upper:]" "[:lower:]")" + +if [ -z "$os" ]; then + . "$(dirname "$0")"/init-os-and-arch.sh +fi + +case "$os" in + linux) + if [ -e /etc/os-release ]; then + . /etc/os-release + fi + + if [ "$ID" = "debian" ] || [ "$ID_LIKE" = "debian" ]; then + apt update + + apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \ + libssl-dev libkrb5-dev zlib1g-dev pigz cpio + + localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 + elif [ "$ID" = "fedora" ]; then + dnf install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel zlib-devel lttng-ust-devel pigz cpio + elif [ "$ID" = "alpine" ]; then + apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev zlib-dev openssl-dev pigz cpio + else + echo "Unsupported distro. 
distro: $ID" + exit 1 + fi + ;; + + osx|maccatalyst|ios|iossimulator|tvos|tvossimulator) + echo "Installed xcode version: $(xcode-select -p)" + + export HOMEBREW_NO_INSTALL_CLEANUP=1 + export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 + # Skip brew update for now, see https://github.com/actions/setup-python/issues/577 + # brew update --preinstall + + # Temporarily uninstall pkg-config@0.29.2 to work around https://github.com/actions/runner-images/issues/10984 + brew uninstall --ignore-dependencies --force pkg-config@0.29.2 + + brew bundle --no-upgrade --no-lock --file=- < .\verify.ps1 *.nupkg + Verifies the metadata of all .nupkg files in the currect working directory. +.EXAMPLE + PS> .\verify.ps1 --help + Displays the help text of the downloaded verifiction tool. +.LINK + https://github.com/NuGet/NuGetGallery/blob/master/src/VerifyMicrosoftPackage/README.md +#> + +# This script was copied from https://github.com/NuGet/NuGetGallery/blob/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1 + +[CmdletBinding(PositionalBinding = $false)] +param( + [string]$NuGetExePath, + [string]$PackageSource = "https://api.nuget.org/v3/index.json", + [string]$DownloadPath, + [Parameter(ValueFromRemainingArguments = $true)] + [string[]]$args +) + +# The URL to download nuget.exe. +$nugetExeUrl = "https://dist.nuget.org/win-x86-commandline/v4.9.4/nuget.exe" + +# The package ID of the verification tool. +$packageId = "NuGet.VerifyMicrosoftPackage" + +# The location that nuget.exe and the verification tool will be downloaded to. +if (!$DownloadPath) { + $DownloadPath = (Join-Path $env:TEMP "NuGet.VerifyMicrosoftPackage") +} + +$fence = New-Object -TypeName string -ArgumentList '=', 80 + +# Create the download directory, if it doesn't already exist. +if (!(Test-Path $DownloadPath)) { + New-Item -ItemType Directory $DownloadPath | Out-Null +} +Write-Host "Using download path: $DownloadPath" + +if ($NuGetExePath) { + $nuget = $NuGetExePath +} else { + $downloadedNuGetExe = Join-Path $DownloadPath "nuget.exe" + + # Download nuget.exe, if it doesn't already exist. + if (!(Test-Path $downloadedNuGetExe)) { + Write-Host "Downloading nuget.exe from $nugetExeUrl..." + $ProgressPreference = 'SilentlyContinue' + try { + Invoke-WebRequest $nugetExeUrl -OutFile $downloadedNuGetExe + $ProgressPreference = 'Continue' + } catch { + $ProgressPreference = 'Continue' + Write-Error $_ + Write-Error "nuget.exe failed to download." + exit + } + } + + $nuget = $downloadedNuGetExe +} + +Write-Host "Using nuget.exe path: $nuget" +Write-Host " " + +# Download the latest version of the verification tool. +Write-Host "Downloading the latest version of $packageId from $packageSource..." +Write-Host $fence +& $nuget install $packageId ` + -Prerelease ` + -OutputDirectory $DownloadPath ` + -Source $PackageSource +Write-Host $fence +Write-Host " " + +if ($LASTEXITCODE -ne 0) { + Write-Error "nuget.exe failed to fetch the verify tool." + exit +} + +# Find the most recently downloaded tool +Write-Host "Finding the most recently downloaded verification tool." +$verifyProbePath = Join-Path $DownloadPath "$packageId.*" +$verifyPath = Get-ChildItem -Path $verifyProbePath -Directory ` + | Sort-Object -Property LastWriteTime -Descending ` + | Select-Object -First 1 +$verify = Join-Path $verifyPath "tools\NuGet.VerifyMicrosoftPackage.exe" +Write-Host "Using verification tool: $verify" +Write-Host " " + +# Execute the verification tool. +Write-Host "Executing the verify tool..." 
+Write-Host $fence +& $verify $args +Write-Host $fence +Write-Host " " + +# Respond to the exit code. +if ($LASTEXITCODE -ne 0) { + Write-Error "The verify tool found some problems." +} else { + Write-Output "The verify tool succeeded." +} diff --git a/eng/common/post-build/post-build-utils.ps1 b/eng/common/post-build/post-build-utils.ps1 deleted file mode 100644 index 534f6988d..000000000 --- a/eng/common/post-build/post-build-utils.ps1 +++ /dev/null @@ -1,91 +0,0 @@ -# Most of the functions in this file require the variables `MaestroApiEndPoint`, -# `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available. - -$ErrorActionPreference = 'Stop' -Set-StrictMode -Version 2.0 - -# `tools.ps1` checks $ci to perform some actions. Since the post-build -# scripts don't necessarily execute in the same agent that run the -# build.ps1/sh script this variable isn't automatically set. -$ci = $true -$disableConfigureToolsetImport = $true -. $PSScriptRoot\..\tools.ps1 - -function Create-MaestroApiRequestHeaders([string]$ContentType = 'application/json') { - Validate-MaestroVars - - $headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' - $headers.Add('Accept', $ContentType) - $headers.Add('Authorization',"Bearer $MaestroApiAccessToken") - return $headers -} - -function Get-MaestroChannel([int]$ChannelId) { - Validate-MaestroVars - - $apiHeaders = Create-MaestroApiRequestHeaders - $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}?api-version=$MaestroApiVersion" - - $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } - return $result -} - -function Get-MaestroBuild([int]$BuildId) { - Validate-MaestroVars - - $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken - $apiEndpoint = "$MaestroApiEndPoint/api/builds/${BuildId}?api-version=$MaestroApiVersion" - - $result = try { return Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } - return $result -} - -function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) { - Validate-MaestroVars - - $SourceRepository = [System.Web.HttpUtility]::UrlEncode($SourceRepository) - $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken - $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions?sourceRepository=$SourceRepository&channelId=$ChannelId&api-version=$MaestroApiVersion" - - $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } - return $result -} - -function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) { - Validate-MaestroVars - - $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken - $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion" - Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null -} - -function Trigger-Subscription([string]$SubscriptionId) { - Validate-MaestroVars - - $apiHeaders = Create-MaestroApiRequestHeaders -AuthToken $MaestroApiAccessToken - $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion" - Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null -} - -function Validate-MaestroVars { - try { - Get-Variable MaestroApiEndPoint | Out-Null - Get-Variable MaestroApiVersion | Out-Null - 
Get-Variable MaestroApiAccessToken | Out-Null - - if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) { - Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'" - ExitWithExitCode 1 - } - - if (!($MaestroApiVersion -Match '^[0-9]{4}-[0-9]{2}-[0-9]{2}$')) { - Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'" - ExitWithExitCode 1 - } - } - catch { - Write-PipelineTelemetryError -Category 'MaestroVars' -Message 'Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script.' - Write-Host $_ - ExitWithExitCode 1 - } -} diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1 index 5a3a32ea8..90b58e32a 100644 --- a/eng/common/post-build/publish-using-darc.ps1 +++ b/eng/common/post-build/publish-using-darc.ps1 @@ -2,7 +2,6 @@ param( [Parameter(Mandatory=$true)][int] $BuildId, [Parameter(Mandatory=$true)][int] $PublishingInfraVersion, [Parameter(Mandatory=$true)][string] $AzdoToken, - [Parameter(Mandatory=$true)][string] $MaestroToken, [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net', [Parameter(Mandatory=$true)][string] $WaitPublishingFinish, [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters, @@ -10,7 +9,12 @@ param( ) try { - . $PSScriptRoot\post-build-utils.ps1 + # `tools.ps1` checks $ci to perform some actions. Since the post-build + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + $disableConfigureToolsetImport = $true + . $PSScriptRoot\..\tools.ps1 $darc = Get-Darc @@ -31,13 +35,14 @@ try { } & $darc add-build-to-channel ` - --id $buildId ` - --publishing-infra-version $PublishingInfraVersion ` - --default-channels ` - --source-branch main ` - --azdev-pat $AzdoToken ` - --bar-uri $MaestroApiEndPoint ` - --password $MaestroToken ` + --id $buildId ` + --publishing-infra-version $PublishingInfraVersion ` + --default-channels ` + --source-branch main ` + --azdev-pat "$AzdoToken" ` + --bar-uri "$MaestroApiEndPoint" ` + --ci ` + --verbose ` @optionalParams if ($LastExitCode -ne 0) { diff --git a/eng/common/post-build/redact-logs.ps1 b/eng/common/post-build/redact-logs.ps1 index 82d91f6fd..b7fc19591 100644 --- a/eng/common/post-build/redact-logs.ps1 +++ b/eng/common/post-build/redact-logs.ps1 @@ -11,7 +11,15 @@ param( ) try { - . $PSScriptRoot\post-build-utils.ps1 + $ErrorActionPreference = 'Stop' + Set-StrictMode -Version 2.0 + + # `tools.ps1` checks $ci to perform some actions. Since the post-build + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + $disableConfigureToolsetImport = $true + . $PSScriptRoot\..\tools.ps1 $packageName = 'binlogtool' diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1 index 4011d324e..1976ef70f 100644 --- a/eng/common/post-build/sourcelink-validation.ps1 +++ b/eng/common/post-build/sourcelink-validation.ps1 @@ -6,7 +6,15 @@ param( [Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use ) -. 
$PSScriptRoot\post-build-utils.ps1 +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +# `tools.ps1` checks $ci to perform some actions. Since the post-build +# scripts don't necessarily execute in the same agent that run the +# build.ps1/sh script this variable isn't automatically set. +$ci = $true +$disableConfigureToolsetImport = $true +. $PSScriptRoot\..\tools.ps1 # Cache/HashMap (File -> Exist flag) used to consult whether a file exist # in the repository at a specific commit point. This is populated by inserting diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1 index cd2181baf..7146e593f 100644 --- a/eng/common/post-build/symbols-validation.ps1 +++ b/eng/common/post-build/symbols-validation.ps1 @@ -322,8 +322,6 @@ function InstallDotnetSymbol { } try { - . $PSScriptRoot\post-build-utils.ps1 - InstallDotnetSymbol foreach ($Job in @(Get-Job)) { diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1 deleted file mode 100644 index ac9a95778..000000000 --- a/eng/common/post-build/trigger-subscriptions.ps1 +++ /dev/null @@ -1,64 +0,0 @@ -param( - [Parameter(Mandatory=$true)][string] $SourceRepo, - [Parameter(Mandatory=$true)][int] $ChannelId, - [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken, - [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net', - [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16' -) - -try { - . $PSScriptRoot\post-build-utils.ps1 - - # Get all the $SourceRepo subscriptions - $normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '') - $subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId - - if (!$subscriptions) { - Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'" - ExitWithExitCode 0 - } - - $subscriptionsToTrigger = New-Object System.Collections.Generic.List[string] - $failedTriggeredSubscription = $false - - # Get all enabled subscriptions that need dependency flow on 'everyBuild' - foreach ($subscription in $subscriptions) { - if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) { - Write-Host "Should trigger this subscription: ${$subscription.id}" - [void]$subscriptionsToTrigger.Add($subscription.id) - } - } - - foreach ($subscriptionToTrigger in $subscriptionsToTrigger) { - try { - Write-Host "Triggering subscription '$subscriptionToTrigger'." - - Trigger-Subscription -SubscriptionId $subscriptionToTrigger - - Write-Host 'done.' - } - catch - { - Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'" - Write-Host $_ - Write-Host $_.ScriptStackTrace - $failedTriggeredSubscription = $true - } - } - - if ($subscriptionsToTrigger.Count -eq 0) { - Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'." - } - elseif ($failedTriggeredSubscription) { - Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message 'At least one subscription failed to be triggered...' - ExitWithExitCode 1 - } - else { - Write-Host 'All subscriptions were triggered successfully!' 
- } -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message $_ - ExitWithExitCode 1 -} diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 index 091023970..4f0546dce 100644 --- a/eng/common/sdk-task.ps1 +++ b/eng/common/sdk-task.ps1 @@ -64,7 +64,7 @@ try { $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty } if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { - $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.5" -MemberType NoteProperty + $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.12.0" -MemberType NoteProperty } if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true diff --git a/eng/common/template-guidance.md b/eng/common/template-guidance.md new file mode 100644 index 000000000..98bbc1ded --- /dev/null +++ b/eng/common/template-guidance.md @@ -0,0 +1,133 @@
+# Overview
+
+Arcade provides templates for public (`/templates`) and 1ES pipeline templates (`/templates-official`) scenarios. Pipelines which are required to be managed by 1ES pipeline templates should reference `/templates-official`; all other pipelines may reference `/templates`.
+
+## How to use
+
+Basic guidance is:
+
+- 1ES Pipeline Template or 1ES Microbuild template runs should reference `eng/common/templates-official`. Any internal production-grade pipeline should use these templates.
+
+- All other runs should reference `eng/common/templates`.
+
+See [azure-pipelines.yml](../../azure-pipelines.yml) (templates-official example) or [azure-pipelines-pr.yml](../../azure-pipelines-pr.yml) (templates example) for examples.
+
+### The `templateIs1ESManaged` parameter
+
+The `templateIs1ESManaged` parameter is available on most templates and affects which of the variants is used for nested templates. See [Development Notes](#development-notes) below for more information on the `templateIs1ESManaged` parameter.
+
+- For templates under `job/`, `jobs/`, `steps`, or `post-build/`, this parameter must be explicitly set.
+
+## Multiple outputs
+
+1ES pipeline templates impose a policy where every publish artifact execution results in additional security scans being injected into your pipeline. When using `templates-official/jobs/jobs.yml`, Arcade reduces the number of additional security injections by gathering all publishing outputs into the [Build.ArtifactStagingDirectory](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services), and utilizing the [outputParentDirectory](https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs#multiple-outputs) feature of 1ES pipeline templates. When implementing your pipeline, if you ensure publish artifacts are located in the `$(Build.ArtifactStagingDirectory)`, and utilize the 1ES-provided template context, then you can reduce the number of security scans for your pipeline.
+ +Example: +``` yaml +# azure-pipelines.yml +extends: + template: azure-pipelines/MicroBuild.1ES.Official.yml@MicroBuildTemplate + parameters: + stages: + - stage: build + jobs: + - template: /eng/common/templates-official/jobs/jobs.yml@self + parameters: + # 1ES makes use of outputs to reduce security task injection overhead + templateContext: + outputs: + - output: pipelineArtifact + displayName: 'Publish logs from source' + continueOnError: true + condition: always() + targetPath: $(Build.ArtifactStagingDirectory)/artifacts/log + artifactName: Logs + jobs: + - job: Windows + steps: + - script: echo "friendly neighborhood" > artifacts/marvel/spiderman.txt + # copy build outputs to artifact staging directory for publishing + - task: CopyFiles@2 + displayName: Gather build output + inputs: + SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel' +``` + +Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`). + +## Development notes + +**Folder / file structure** + +``` text +eng\common\ + [templates || templates-official]\ + job\ + job.yml (shim + artifact publishing logic) + onelocbuild.yml (shim) + publish-build-assets.yml (shim) + source-build.yml (shim) + source-index-stage1.yml (shim) + jobs\ + codeql-build.yml (shim) + jobs.yml (shim) + source-build.yml (shim) + post-build\ + post-build.yml (shim) + common-variabls.yml (shim) + setup-maestro-vars.yml (shim) + steps\ + publish-build-artifacts.yml (logic) + publish-pipeline-artifacts.yml (logic) + component-governance.yml (shim) + generate-sbom.yml (shim) + publish-logs.yml (shim) + retain-build.yml (shim) + send-to-helix.yml (shim) + source-build.yml (shim) + variables\ + pool-providers.yml (logic + redirect) # templates/variables/pool-providers.yml will redirect to templates-official/variables/pool-providers.yml if you are running in the internal project + sdl-variables.yml (logic) + core-templates\ + job\ + job.yml (logic) + onelocbuild.yml (logic) + publish-build-assets.yml (logic) + source-build.yml (logic) + source-index-stage1.yml (logic) + jobs\ + codeql-build.yml (logic) + jobs.yml (logic) + source-build.yml (logic) + post-build\ + common-variabls.yml (logic) + post-build.yml (logic) + setup-maestro-vars.yml (logic) + steps\ + component-governance.yml (logic) + generate-sbom.yml (logic) + publish-build-artifacts.yml (redirect) + publish-logs.yml (logic) + publish-pipeline-artifacts.yml (redirect) + retain-build.yml (logic) + send-to-helix.yml (logic) + source-build.yml (logic) + variables\ + pool-providers.yml (redirect) +``` + +In the table above, a file is designated as "shim", "logic", or "redirect". + +- shim - represents a yaml file which is an intermediate step between pipeline logic and .Net Core Engineering's templates (`core-templates`) and defines the `is1ESPipeline` parameter value. + +- logic - represents actual base template logic. + +- redirect- represents a file in `core-templates` which redirects to the "logic" file in either `templates` or `templates-official`. + +Logic for Arcade's templates live **primarily** in the `core-templates` folder. The exceptions to the location of the logic files are around artifact publishing, which is handled differently between 1es pipeline templates and standard templates. `templates` and `templates-official` provide shim entry points which redirect to `core-templates` while also defining the `is1ESPipeline` parameter. 
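For illustration, a shim is little more than a pass-through that pins `is1ESPipeline` and forwards every other caller-supplied parameter to the corresponding `core-templates` file. The sketch below mirrors the shim pattern introduced in this change, shown here for the `templates` (non-1ES) variant; the file path is only an example:

``` yaml
# eng/common/templates/jobs/jobs.yml (illustrative sketch of the shim pattern)
jobs:
- template: /eng/common/core-templates/jobs/jobs.yml
  parameters:
    # Non-1ES entry point, so the core templates select the standard publishing steps.
    is1ESPipeline: false

    # Forward all remaining parameters unchanged to the core template.
    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
```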
If a shim is referenced in `templates`, then `is1ESPipeline` is set to `false`. If a shim is referenced in `templates-official`, then `is1ESPipeline` is set to `true`. + +Within `templates` and `templates-official`, the templates at the "stages", and "jobs" / "job" level have been replaced with shims. Templates at the "steps" and "variables" level are typically too granular to be replaced with shims and instead persist logic which is directly applicable to either scenario. + +Within `core-templates`, there are a handful of places where logic is dependent on which shim entry point was used. In those places, we redirect back to the respective logic file in `templates` or `templates-official`. diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml index dfc3c0cf0..605692d2f 100644 --- a/eng/common/templates-official/job/job.yml +++ b/eng/common/templates-official/job/job.yml @@ -1,263 +1,80 @@ -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. - parameters: -# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - cancelTimeoutInMinutes: '' - condition: '' - container: '' - continueOnError: false - dependsOn: '' - displayName: '' - pool: '' - steps: [] - strategy: '' - timeoutInMinutes: '' - variables: [] - workspace: '' - templateContext: '' - -# Job base template specific parameters - # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md - artifacts: '' - enableMicrobuild: false - enablePublishBuildArtifacts: false - enablePublishBuildAssets: false - enablePublishTestResults: false - enablePublishUsingPipelines: false - enableBuildRetry: false - disableComponentGovernance: '' - componentGovernanceIgnoreDirectories: '' - mergeTestResults: false - testRunTitle: '' - testResultsFormat: '' - name: '' - preSteps: [] - runAsPublic: false # Sbom related params enableSbom: true - PackageVersion: 7.0.0 + runAsPublic: false + PackageVersion: 9.0.0 BuildDropPath: '$(Build.SourcesDirectory)/artifacts' jobs: -- job: ${{ parameters.name }} - - ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: - cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} - - ${{ if ne(parameters.condition, '') }}: - condition: ${{ parameters.condition }} - - ${{ if ne(parameters.container, '') }}: - container: ${{ parameters.container }} - - ${{ if ne(parameters.continueOnError, '') }}: - continueOnError: ${{ parameters.continueOnError }} - - ${{ if ne(parameters.dependsOn, '') }}: - dependsOn: ${{ parameters.dependsOn }} - - ${{ if ne(parameters.displayName, '') }}: - displayName: ${{ parameters.displayName }} - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - - ${{ if ne(parameters.strategy, '') }}: - strategy: ${{ parameters.strategy }} - - ${{ if ne(parameters.timeoutInMinutes, '') }}: - timeoutInMinutes: ${{ parameters.timeoutInMinutes }} - - ${{ if ne(parameters.templateContext, '') }}: - templateContext: ${{ parameters.templateContext }} - - variables: - - ${{ if ne(parameters.enableTelemetry, 'false') }}: - - name: DOTNET_CLI_TELEMETRY_PROFILE - value: '$(Build.Repository.Uri)' - - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}: - - name: EnableRichCodeNavigation - value: 'true' - # Retry signature validation up to three times, waiting 2 seconds between attempts. 
- # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures - - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY - value: 3,2000 - - ${{ each variable in parameters.variables }}: - # handle name-value variable syntax - # example: - # - name: [key] - # value: [value] - - ${{ if ne(variable.name, '') }}: - - name: ${{ variable.name }} - value: ${{ variable.value }} - - # handle variable groups - - ${{ if ne(variable.group, '') }}: - - group: ${{ variable.group }} - - # handle template variable syntax - # example: - # - template: path/to/template.yml - # parameters: - # [key]: [value] - - ${{ if ne(variable.template, '') }}: - - template: ${{ variable.template }} - ${{ if ne(variable.parameters, '') }}: - parameters: ${{ variable.parameters }} - - # handle key-value variable syntax. - # example: - # - [key]: [value] - - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: - - ${{ each pair in variable }}: - - name: ${{ pair.key }} - value: ${{ pair.value }} - - # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds - - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: DotNet-HelixApi-Access - - ${{ if ne(parameters.workspace, '') }}: - workspace: ${{ parameters.workspace }} - - steps: - - ${{ if ne(parameters.preSteps, '') }}: - - ${{ each preStep in parameters.preSteps }}: - - ${{ preStep }} - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - task: MicroBuildSigningPlugin@3 - displayName: Install MicroBuild plugin - inputs: - signType: $(_SignType) - zipSources: false - feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - env: - TeamName: $(_TeamName) - continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - - - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: - - task: NuGetAuthenticate@1 - - - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: - - task: DownloadPipelineArtifact@2 - inputs: - buildType: current - artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} - targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} - itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} - - - ${{ each step in parameters.steps }}: - - ${{ step }} - - - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: - - task: RichCodeNavIndexer@0 - displayName: RichCodeNav Upload - inputs: - languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} - environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }} - richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin - uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} - continueOnError: true - - - template: /eng/common/templates-official/steps/component-governance.yml - parameters: - ${{ if eq(parameters.disableComponentGovernance, '') }}: - ${{ if and(ne(variables['System.TeamProject'], 'public'), 
notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: - disableComponentGovernance: false - ${{ else }}: - disableComponentGovernance: true - ${{ else }}: - disableComponentGovernance: ${{ parameters.disableComponentGovernance }} - componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} - - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: MicroBuildCleanup@1 - displayName: Execute Microbuild cleanup tasks - condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - env: - TeamName: $(_TeamName) - - - ${{ if ne(parameters.artifacts.publish, '') }}: - - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: - - task: CopyFiles@2 - displayName: Gather binaries for publish to artifacts - inputs: - SourceFolder: 'artifacts/bin' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' - - task: CopyFiles@2 - displayName: Gather packages for publish to artifacts - inputs: - SourceFolder: 'artifacts/packages' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish pipeline artifacts - inputs: - PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' - PublishLocation: Container - ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} - continueOnError: true - condition: always() - - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: 'artifacts/log' - artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} - displayName: 'Publish logs' - continueOnError: true - condition: always() - - - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish Logs - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' - PublishLocation: Container - ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} - continueOnError: true - condition: always() - - - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: - - task: PublishTestResults@2 - displayName: Publish XUnit Test Results - inputs: - testResultsFormat: 'xUnit' - testResultsFiles: '*.xml' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' - testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit - mergeTestResults: ${{ parameters.mergeTestResults }} - continueOnError: true - condition: always() - - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), 
eq(parameters.testResultsFormat, 'vstest')) }}: - - task: PublishTestResults@2 - displayName: Publish TRX Test Results - inputs: - testResultsFormat: 'VSTest' - testResultsFiles: '*.trx' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' - testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx - mergeTestResults: ${{ parameters.mergeTestResults }} - continueOnError: true - condition: always() - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: - - template: /eng/common/templates-official/steps/generate-sbom.yml - parameters: - PackageVersion: ${{ parameters.packageVersion}} - BuildDropPath: ${{ parameters.buildDropPath }} - IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} - - - ${{ if eq(parameters.enableBuildRetry, 'true') }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration' - artifactName: 'BuildConfiguration' - displayName: 'Publish build retry configuration' - continueOnError: true \ No newline at end of file +- template: /eng/common/core-templates/job/job.yml + parameters: + is1ESPipeline: true + + componentGovernanceSteps: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: + - template: /eng/common/templates/steps/generate-sbom.yml + parameters: + PackageVersion: ${{ parameters.packageVersion }} + BuildDropPath: ${{ parameters.buildDropPath }} + publishArtifacts: false + + # publish artifacts + # for 1ES managed templates, use the templateContext.output to handle multiple outputs. 
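+    # All outputs below are rooted under $(Build.ArtifactStagingDirectory) (the outputParentDirectory),
+    # which reduces the number of security scan tasks 1ES injects for artifact publishing.
+    # Any outputs the calling pipeline supplies via templateContext.outputs are appended further down.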
+ templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - output: buildArtifacts + displayName: Publish pipeline artifacts + PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' + ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} + condition: always() + continueOnError: true + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - output: pipelineArtifact + targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log' + artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }} + displayName: 'Publish logs' + continueOnError: true + condition: always() + sbomEnabled: false # we don't need SBOM for logs + + - ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}: + - output: buildArtifacts + displayName: Publish Logs + PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' + publishLocation: Container + ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} + continueOnError: true + condition: always() + sbomEnabled: false # we don't need SBOM for logs + + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - output: pipelineArtifact + targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/eng/common/BuildConfiguration' + artifactName: 'BuildConfiguration' + displayName: 'Publish build retry configuration' + continueOnError: true + sbomEnabled: false # we don't need SBOM for BuildConfiguration + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: + - output: pipelineArtifact + displayName: Publish SBOM manifest + continueOnError: true + targetPath: $(Build.ArtifactStagingDirectory)/sbom + artifactName: $(ARTIFACT_NAME) + + # add any outputs provided via root yaml + - ${{ if ne(parameters.templateContext.outputs, '') }}: + - ${{ each output in parameters.templateContext.outputs }}: + - ${{ output }} + + # add any remaining templateContext properties + ${{ each context in parameters.templateContext }}: + ${{ if and(ne(context.key, 'outputParentDirectory'), ne(context.key, 'outputs')) }}: + ${{ context.key }}: ${{ context.value }} + + ${{ each parameter in parameters }}: + ${{ if and(ne(parameter.key, 'templateContext'), ne(parameter.key, 'is1ESPipeline')) }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml index ba9ba4930..0f0c514b9 100644 --- a/eng/common/templates-official/job/onelocbuild.yml +++ b/eng/common/templates-official/job/onelocbuild.yml @@ -1,112 +1,7 @@ -parameters: - # Optional: dependencies of the job - dependsOn: '' - - # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool - pool: '' - - CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex - GithubPat: $(BotAccount-dotnet-bot-repo-PAT) - - SourcesDirectory: $(Build.SourcesDirectory) - CreatePr: true - AutoCompletePr: false - ReusePr: true - 
UseLfLineEndings: true - UseCheckedInLocProjectJson: false - SkipLocProjectJsonGeneration: false - LanguageSet: VS_Main_Languages - LclSource: lclFilesInRepo - LclPackageId: '' - RepoType: gitHub - GitHubOrg: dotnet - MirrorRepo: '' - MirrorBranch: main - condition: '' - JobNameSuffix: '' - jobs: -- job: OneLocBuild${{ parameters.JobNameSuffix }} - - dependsOn: ${{ parameters.dependsOn }} - - displayName: OneLocBuild${{ parameters.JobNameSuffix }} - - variables: - - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat - - name: _GenerateLocProjectArguments - value: -SourcesDirectory ${{ parameters.SourcesDirectory }} - -LanguageSet "${{ parameters.LanguageSet }}" - -CreateNeutralXlfs - - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: - - name: _GenerateLocProjectArguments - value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson - - template: /eng/common/templates-official/variables/pool-providers.yml - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - ${{ if eq(parameters.pool, '') }}: - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022-pt - os: windows - - steps: - - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: - - task: Powershell@2 - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 - arguments: $(_GenerateLocProjectArguments) - displayName: Generate LocProject.json - condition: ${{ parameters.condition }} - - - task: OneLocBuild@2 - displayName: OneLocBuild - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - inputs: - locProj: eng/Localize/LocProject.json - outDir: $(Build.ArtifactStagingDirectory) - lclSource: ${{ parameters.LclSource }} - lclPackageId: ${{ parameters.LclPackageId }} - isCreatePrSelected: ${{ parameters.CreatePr }} - isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} - ${{ if eq(parameters.CreatePr, true) }}: - isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} - ${{ if eq(parameters.RepoType, 'gitHub') }}: - isShouldReusePrSelected: ${{ parameters.ReusePr }} - packageSourceAuth: patAuth - patVariable: ${{ parameters.CeapexPat }} - ${{ if eq(parameters.RepoType, 'gitHub') }}: - repoType: ${{ parameters.RepoType }} - gitHubPatVariable: "${{ parameters.GithubPat }}" - ${{ if ne(parameters.MirrorRepo, '') }}: - isMirrorRepoSelected: true - gitHubOrganization: ${{ parameters.GitHubOrg }} - mirrorRepo: ${{ parameters.MirrorRepo }} - mirrorBranch: ${{ parameters.MirrorBranch }} - condition: ${{ parameters.condition }} - - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish Localization Files - inputs: - PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc' - PublishLocation: Container - ArtifactName: Loc - condition: ${{ parameters.condition }} +- template: /eng/common/core-templates/job/onelocbuild.yml + parameters: + is1ESPipeline: true - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish LocProject.json - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/' - PublishLocation: Container - ArtifactName: Loc - condition: ${{ parameters.condition }} \ No newline at end of file + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ 
parameter.value }} diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml index d72e4ea6d..d667a70e8 100644 --- a/eng/common/templates-official/job/publish-build-assets.yml +++ b/eng/common/templates-official/job/publish-build-assets.yml @@ -1,159 +1,7 @@ -parameters: - configuration: 'Debug' - - # Optional: condition for the job to run - condition: '' - - # Optional: 'true' if future jobs should run even if this job fails - continueOnError: false - - # Optional: dependencies of the job - dependsOn: '' - - # Optional: Include PublishBuildArtifacts task - enablePublishBuildArtifacts: false - - # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool - pool: {} - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. - runAsPublic: false - - # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing - publishUsingPipelines: false - - # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing - publishAssetsImmediately: false - - artifactsPublishingAdditionalParameters: '' - - signingValidationAdditionalParameters: '' - jobs: -- job: Asset_Registry_Publish - - dependsOn: ${{ parameters.dependsOn }} - timeoutInMinutes: 150 - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - displayName: Publish Assets - ${{ else }}: - displayName: Publish to Build Asset Registry - - variables: - - template: /eng/common/templates-official/variables/pool-providers.yml - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: Publish-Build-Assets - - group: AzureDevOps-Artifact-Feeds-Pats - - name: runCodesignValidationInjection - value: false - # unconditional - needed for logs publishing (redactor tool version) - - template: /eng/common/templates-official/post-build/common-variables.yml - - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022-pt - os: windows - steps: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - checkout: self - fetchDepth: 3 - clean: true - - - task: DownloadBuildArtifacts@0 - displayName: Download artifact - inputs: - artifactName: AssetManifests - downloadPath: '$(Build.StagingDirectory)/Download' - checkDownloadedFiles: true - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - - task: NuGetAuthenticate@1 - - - task: PowerShell@2 - displayName: Publish Build Assets - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet - /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' - /p:BuildAssetRegistryToken=$(MaestroAccessToken) - /p:MaestroApiEndpoint=https://maestro.dot.net - /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} - /p:OfficialBuildId=$(Build.BuildNumber) - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - - task: powershell@2 - displayName: Create ReleaseConfigs Artifact - inputs: - targetType: inline - script: | - New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force - $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt" - Add-Content -Path $filePath -Value $(BARBuildId) - Add-Content -Path $filePath -Value "$(DefaultChannels)" - Add-Content -Path $filePath -Value $(IsStableBuild) - - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish ReleaseConfigs Artifact - inputs: - PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs' - PublishLocation: Container - ArtifactName: ReleaseConfigs - - - task: powershell@2 - displayName: Check if SymbolPublishingExclusionsFile.txt exists - inputs: - targetType: inline - script: | - $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" - if(Test-Path -Path $symbolExclusionfile) - { - Write-Host "SymbolExclusionFile exists" - Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true" - } - else{ - Write-Host "Symbols Exclusion file does not exists" - Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false" - } - - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish SymbolPublishingExclusionsFile Artifact - condition: eq(variables['SymbolExclusionFile'], 'true') - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' - PublishLocation: Container - ArtifactName: ReleaseConfigs - - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: PowerShell@2 - displayName: Publish Using Darc - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) - -PublishingInfraVersion 3 - -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' - -MaestroToken '$(MaestroApiAccessToken)' - -WaitPublishingFinish true - 
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' - -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' +- template: /eng/common/core-templates/job/publish-build-assets.yml + parameters: + is1ESPipeline: true - - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - - template: /eng/common/templates-official/steps/publish-logs.yml - parameters: - JobLabel: 'Publish_Artifacts_Logs' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml index 50f04e642..1a480034b 100644 --- a/eng/common/templates-official/job/source-build.yml +++ b/eng/common/templates-official/job/source-build.yml @@ -1,67 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. The template produces a server job with a - # default ID 'Source_Build_Complete' to put in a dependency list if necessary. - - # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. - jobNamePrefix: 'Source_Build' - - # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for - # managed-only repositories. This is an object with these properties: - # - # name: '' - # The name of the job. This is included in the job ID. - # targetRID: '' - # The name of the target RID to use, instead of the one auto-detected by Arcade. - # nonPortable: false - # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than - # linux-x64), and compiling against distro-provided packages rather than portable ones. - # skipPublishValidation: false - # Disables publishing validation. By default, a check is performed to ensure no packages are - # published by source-build. - # container: '' - # A container to use. Runs in docker. - # pool: {} - # A pool to use. Runs directly on an agent. - # buildScript: '' - # Specifies the build script to invoke to perform the build in the repo. The default - # './build.sh' should work for typical Arcade repositories, but this is customizable for - # difficult situations. - # jobProperties: {} - # A list of job properties to inject at the top level, for potential extensibility beyond - # container and pool. - platform: {} - jobs: -- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} - displayName: Source-Build (${{ parameters.platform.name }}) - - ${{ each property in parameters.platform.jobProperties }}: - ${{ property.key }}: ${{ property.value }} - - ${{ if ne(parameters.platform.container, '') }}: - container: ${{ parameters.platform.container }} - - ${{ if eq(parameters.platform.pool, '') }}: - # The default VM host AzDO pool. This should be capable of running Docker containers: almost all - # source-build builds run in Docker, including the default managed platform. 
- # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic - pool: - ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] - demands: ImageOverride -equals build.ubuntu.1804.amd64 - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] - image: 1es-mariner-2-pt - os: linux - - ${{ if ne(parameters.platform.pool, '') }}: - pool: ${{ parameters.platform.pool }} - - workspace: - clean: all +- template: /eng/common/core-templates/job/source-build.yml + parameters: + is1ESPipeline: true - steps: - - template: /eng/common/templates-official/steps/source-build.yml - parameters: - platform: ${{ parameters.platform }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml index 53a9ef51f..6d5ead316 100644 --- a/eng/common/templates-official/job/source-index-stage1.yml +++ b/eng/common/templates-official/job/source-index-stage1.yml @@ -1,67 +1,7 @@ -parameters: - runAsPublic: false - sourceIndexPackageVersion: 1.0.1-20240129.2 - sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json - sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" - preSteps: [] - binlogPath: artifacts/log/Debug/Build.binlog - condition: '' - dependsOn: '' - pool: '' - jobs: -- job: SourceIndexStage1 - dependsOn: ${{ parameters.dependsOn }} - condition: ${{ parameters.condition }} - variables: - - name: SourceIndexPackageVersion - value: ${{ parameters.sourceIndexPackageVersion }} - - name: SourceIndexPackageSource - value: ${{ parameters.sourceIndexPackageSource }} - - name: BinlogPath - value: ${{ parameters.binlogPath }} - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: source-dot-net stage1 variables - - template: /eng/common/templates-official/variables/pool-providers.yml - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - ${{ if eq(parameters.pool, '') }}: - pool: - ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: $(DncEngPublicBuildPool) - image: windows.vs2022.amd64.open - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - name: $(DncEngInternalBuildPool) - image: windows.vs2022.amd64 - - steps: - - ${{ each preStep in parameters.preSteps }}: - - ${{ preStep }} - - - task: UseDotNet@2 - displayName: Use .NET 8 SDK - inputs: - packageType: sdk - version: 8.0.x - installationPath: $(Agent.TempDirectory)/dotnet - workingDirectory: $(Agent.TempDirectory) - - - script: | - $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools - $(Agent.TempDirectory)/dotnet/dotnet tool 
install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools - displayName: Download Tools - # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. - workingDirectory: $(Agent.TempDirectory) - - - script: ${{ parameters.sourceIndexBuildCommand }} - displayName: Build Repository - - - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output - displayName: Process Binlog into indexable sln +- template: /eng/common/core-templates/job/source-index-stage1.yml + parameters: + is1ESPipeline: true - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) - displayName: Upload stage1 artifacts to source index - env: - BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url) + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml index b68d3c2f3..a726322ec 100644 --- a/eng/common/templates-official/jobs/codeql-build.yml +++ b/eng/common/templates-official/jobs/codeql-build.yml @@ -1,31 +1,7 @@ -parameters: - # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md - continueOnError: false - # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - jobs: [] - # Optional: if specified, restore and use this version of Guardian instead of the default. - overrideGuardianVersion: '' - jobs: -- template: /eng/common/templates-official/jobs/jobs.yml +- template: /eng/common/core-templates/jobs/codeql-build.yml parameters: - enableMicrobuild: false - enablePublishBuildArtifacts: false - enablePublishTestResults: false - enablePublishBuildAssets: false - enablePublishUsingPipelines: false - enableTelemetry: true + is1ESPipeline: true - variables: - - group: Publish-Build-Assets - # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in - # sync with the packages.config file. 
- - name: DefaultGuardianVersion - value: 0.109.0 - - name: GuardianPackagesConfigFile - value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config - - name: GuardianVersion - value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} - - jobs: ${{ parameters.jobs }} - + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml index 857a0f8ba..007deddae 100644 --- a/eng/common/templates-official/jobs/jobs.yml +++ b/eng/common/templates-official/jobs/jobs.yml @@ -1,97 +1,7 @@ -parameters: - # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md - continueOnError: false - - # Optional: Include PublishBuildArtifacts task - enablePublishBuildArtifacts: false - - # Optional: Enable publishing using release pipelines - enablePublishUsingPipelines: false - - # Optional: Enable running the source-build jobs to build repo from source - enableSourceBuild: false - - # Optional: Parameters for source-build template. - # See /eng/common/templates-official/jobs/source-build.yml for options - sourceBuildParameters: [] - - graphFileGeneration: - # Optional: Enable generating the graph files at the end of the build - enabled: false - # Optional: Include toolset dependencies in the generated graph files - includeToolset: false - - # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - jobs: [] - - # Optional: Override automatically derived dependsOn value for "publish build assets" job - publishBuildAssetsDependsOn: '' - - # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage. - publishAssetsImmediately: false - - # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) - artifactsPublishingAdditionalParameters: '' - signingValidationAdditionalParameters: '' - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. - runAsPublic: false - - enableSourceIndex: false - sourceIndexParams: {} - -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. 
- jobs: -- ${{ each job in parameters.jobs }}: - - template: ../job/job.yml - parameters: - # pass along parameters - ${{ each parameter in parameters }}: - ${{ if ne(parameter.key, 'jobs') }}: - ${{ parameter.key }}: ${{ parameter.value }} - - # pass along job properties - ${{ each property in job }}: - ${{ if ne(property.key, 'job') }}: - ${{ property.key }}: ${{ property.value }} - - name: ${{ job.job }} - -- ${{ if eq(parameters.enableSourceBuild, true) }}: - - template: /eng/common/templates-official/jobs/source-build.yml - parameters: - allCompletedJobId: Source_Build_Complete - ${{ each parameter in parameters.sourceBuildParameters }}: - ${{ parameter.key }}: ${{ parameter.value }} - -- ${{ if eq(parameters.enableSourceIndex, 'true') }}: - - template: ../job/source-index-stage1.yml - parameters: - runAsPublic: ${{ parameters.runAsPublic }} - ${{ each parameter in parameters.sourceIndexParams }}: - ${{ parameter.key }}: ${{ parameter.value }} - -- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: - - template: ../job/publish-build-assets.yml - parameters: - continueOnError: ${{ parameters.continueOnError }} - dependsOn: - - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.publishBuildAssetsDependsOn }}: - - ${{ job.job }} - - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.jobs }}: - - ${{ job.job }} - - ${{ if eq(parameters.enableSourceBuild, true) }}: - - Source_Build_Complete +- template: /eng/common/core-templates/jobs/jobs.yml + parameters: + is1ESPipeline: true - runAsPublic: ${{ parameters.runAsPublic }} - publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} - publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} - enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml index 08e5db9bb..483e7b611 100644 --- a/eng/common/templates-official/jobs/source-build.yml +++ b/eng/common/templates-official/jobs/source-build.yml @@ -1,46 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. A job is created for each platform, as - # well as an optional server job that completes when all platform jobs complete. - - # The name of the "join" job for all source-build platforms. If set to empty string, the job is - # not included. Existing repo pipelines can use this job depend on all source-build jobs - # completing without maintaining a separate list of every single job ID: just depend on this one - # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. - allCompletedJobId: '' - - # See /eng/common/templates-official/job/source-build.yml - jobNamePrefix: 'Source_Build' - - # This is the default platform provided by Arcade, intended for use by a managed-only repo. 
- defaultManagedPlatform: - name: 'Managed' - container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8' - - # Defines the platforms on which to run build jobs. One job is created for each platform, and the - # object in this array is sent to the job template as 'platform'. If no platforms are specified, - # one job runs on 'defaultManagedPlatform'. - platforms: [] - jobs: +- template: /eng/common/core-templates/jobs/source-build.yml + parameters: + is1ESPipeline: true -- ${{ if ne(parameters.allCompletedJobId, '') }}: - - job: ${{ parameters.allCompletedJobId }} - displayName: Source-Build Complete - pool: server - dependsOn: - - ${{ each platform in parameters.platforms }}: - - ${{ parameters.jobNamePrefix }}_${{ platform.name }} - - ${{ if eq(length(parameters.platforms), 0) }}: - - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} - -- ${{ each platform in parameters.platforms }}: - - template: /eng/common/templates-official/job/source-build.yml - parameters: - jobNamePrefix: ${{ parameters.jobNamePrefix }} - platform: ${{ platform }} - -- ${{ if eq(length(parameters.platforms), 0) }}: - - template: /eng/common/templates-official/job/source-build.yml - parameters: - jobNamePrefix: ${{ parameters.jobNamePrefix }} - platform: ${{ parameters.defaultManagedPlatform }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml index b9ede10bf..c32fc4923 100644 --- a/eng/common/templates-official/post-build/common-variables.yml +++ b/eng/common/templates-official/post-build/common-variables.yml @@ -1,24 +1,8 @@ variables: - - group: Publish-Build-Assets +- template: /eng/common/core-templates/post-build/common-variables.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: true - # Whether the build is internal or not - - name: IsInternalBuild - value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} - - # Default Maestro++ API Endpoint and API Version - - name: MaestroApiEndPoint - value: "https://maestro.dot.net" - - name: MaestroApiAccessToken - value: $(MaestroAccessToken) - - name: MaestroApiVersion - value: "2020-02-20" - - - name: SourceLinkCLIVersion - value: 3.0.0 - - name: SymbolToolVersion - value: 1.0.1 - - name: BinlogToolVersion - value: 1.0.11 - - - name: runCodesignValidationInjection - value: false + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml index 5c98fe1c0..2364c0fd4 100644 --- a/eng/common/templates-official/post-build/post-build.yml +++ b/eng/common/templates-official/post-build/post-build.yml @@ -1,285 +1,8 @@ -parameters: - # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. - # Publishing V1 is no longer supported - # Publishing V2 is no longer supported - # Publishing V3 is the default - - name: publishingInfraVersion - displayName: Which version of publishing should be used to promote the build definition? 
- type: number - default: 3 - values: - - 3 - - - name: BARBuildId - displayName: BAR Build Id - type: number - default: 0 - - - name: PromoteToChannelIds - displayName: Channel to promote BARBuildId to - type: string - default: '' - - - name: enableSourceLinkValidation - displayName: Enable SourceLink validation - type: boolean - default: false - - - name: enableSigningValidation - displayName: Enable signing validation - type: boolean - default: true - - - name: enableSymbolValidation - displayName: Enable symbol validation - type: boolean - default: false - - - name: enableNugetValidation - displayName: Enable NuGet validation - type: boolean - default: true - - - name: publishInstallersAndChecksums - displayName: Publish installers and checksums - type: boolean - default: true - - - name: SDLValidationParameters - type: object - default: - enable: false - publishGdn: false - continueOnError: false - params: '' - artifactNames: '' - downloadArtifacts: true - - # These parameters let the user customize the call to sdk-task.ps1 for publishing - # symbols & general artifacts as well as for signing validation - - name: symbolPublishingAdditionalParameters - displayName: Symbol publishing additional parameters - type: string - default: '' - - - name: artifactsPublishingAdditionalParameters - displayName: Artifact publishing additional parameters - type: string - default: '' - - - name: signingValidationAdditionalParameters - displayName: Signing validation additional parameters - type: string - default: '' - - # Which stages should finish execution before post-build stages start - - name: validateDependsOn - type: object - default: - - build - - - name: publishDependsOn - type: object - default: - - Validate - - # Optional: Call asset publishing rather than running in a separate stage - - name: publishAssetsImmediately - type: boolean - default: false - stages: -- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: - - stage: Validate - dependsOn: ${{ parameters.validateDependsOn }} - displayName: Validate Build Assets - variables: - - template: common-variables.yml - - template: /eng/common/templates-official/variables/pool-providers.yml - jobs: - - job: - displayName: NuGet Validation - condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022-pt - os: windows - - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 - arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ - -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ - - - job: - displayName: Signing Validation - condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022-pt - os: windows - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - itemPattern: | - ** - !**/Microsoft.SourceBuild.Intermediate.*.nupkg - - # This is necessary whenever we want to publish/restore to an AzDO private feed - # Since sdk-task.ps1 tries to restore packages we need to do this authentication here - # otherwise it'll complain about accessing a private feed. - - task: NuGetAuthenticate@1 - displayName: 'Authenticate to AzDO Feeds' - - # Signing validation will optionally work with the buildmanifest file which is downloaded from - # Azure DevOps above. - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task SigningValidation -restore -msbuildEngine vs - /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' - /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' - ${{ parameters.signingValidationAdditionalParameters }} - - - template: ../steps/publish-logs.yml - parameters: - StageLabel: 'Validation' - JobLabel: 'Signing' - BinlogToolVersion: $(BinlogToolVersion) - - - job: - displayName: SourceLink Validation - condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022-pt - os: windows - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Blob Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: BlobArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 - arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ - -ExtractPath $(Agent.BuildDirectory)/Extract/ - -GHRepoName $(Build.Repository.Name) - -GHCommit $(Build.SourceVersion) - -SourcelinkCliVersion $(SourceLinkCLIVersion) - continueOnError: true - -- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: - - stage: publish_using_darc - ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: - dependsOn: ${{ parameters.publishDependsOn }} - ${{ else }}: - dependsOn: ${{ parameters.validateDependsOn }} - displayName: Publish using Darc - variables: - - template: common-variables.yml - - template: /eng/common/templates-official/variables/pool-providers.yml - jobs: - - job: - displayName: Publish Using Darc - timeoutInMinutes: 120 - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022-pt - os: windows - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: NuGetAuthenticate@1 +- template: /eng/common/core-templates/post-build/post-build.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: true - - task: PowerShell@2 - displayName: Publish Using Darc - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) - -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} - -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' - -MaestroToken '$(MaestroApiAccessToken)' - -WaitPublishingFinish true - -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' - -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml index 0c87f149a..024397d87 100644 --- a/eng/common/templates-official/post-build/setup-maestro-vars.yml +++ b/eng/common/templates-official/post-build/setup-maestro-vars.yml @@ -1,70 +1,8 @@ -parameters: - BARBuildId: '' - PromoteToChannelIds: '' - steps: - - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: - - task: DownloadBuildArtifacts@0 - displayName: Download Release Configs - inputs: - buildType: current - artifactName: ReleaseConfigs - checkDownloadedFiles: true - - - task: PowerShell@2 - name: setReleaseVars - displayName: Set Release Configs Vars - inputs: - targetType: inline - pwsh: true - script: | - try { - if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { - $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt - - $BarId = $Content | Select -Index 0 - $Channels = $Content | Select -Index 1 - $IsStableBuild = $Content | Select -Index 2 - - $AzureDevOpsProject = $Env:System_TeamProject - $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId - $AzureDevOpsBuildId = $Env:Build_BuildId - } - else { - $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}" - - $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' - $apiHeaders.Add('Accept', 'application/json') - $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}") - - $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } - - $BarId = $Env:BARBuildId - $Channels = $Env:PromoteToMaestroChannels -split "," - $Channels = $Channels -join "][" - $Channels = "[$Channels]" - - $IsStableBuild = $buildInfo.stable - $AzureDevOpsProject = $buildInfo.azureDevOpsProject - $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId - $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId - } - - Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" - Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" - Write-Host "##vso[task.setvariable 
variable=IsStableBuild]$IsStableBuild" +- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: true - Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" - Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" - Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" - } - catch { - Write-Host $_ - Write-Host $_.Exception - Write-Host $_.ScriptStackTrace - exit 1 - } - env: - MAESTRO_API_TOKEN: $(MaestroApiAccessToken) - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/post-build/trigger-subscription.yml b/eng/common/templates-official/post-build/trigger-subscription.yml deleted file mode 100644 index da669030d..000000000 --- a/eng/common/templates-official/post-build/trigger-subscription.yml +++ /dev/null @@ -1,13 +0,0 @@ -parameters: - ChannelId: 0 - -steps: -- task: PowerShell@2 - displayName: Triggering subscriptions - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1 - arguments: -SourceRepo $(Build.Repository.Uri) - -ChannelId ${{ parameters.ChannelId }} - -MaestroApiAccessToken $(MaestroAccessToken) - -MaestroApiEndPoint $(MaestroApiEndPoint) - -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates-official/steps/add-build-to-channel.yml b/eng/common/templates-official/steps/add-build-to-channel.yml deleted file mode 100644 index f67a210d6..000000000 --- a/eng/common/templates-official/steps/add-build-to-channel.yml +++ /dev/null @@ -1,13 +0,0 @@ -parameters: - ChannelId: 0 - -steps: -- task: PowerShell@2 - displayName: Add Build to Channel - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1 - arguments: -BuildId $(BARBuildId) - -ChannelId ${{ parameters.ChannelId }} - -MaestroApiAccessToken $(MaestroApiAccessToken) - -MaestroApiEndPoint $(MaestroApiEndPoint) - -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml index 0ecec47b0..30bb3985c 100644 --- a/eng/common/templates-official/steps/component-governance.yml +++ b/eng/common/templates-official/steps/component-governance.yml @@ -1,13 +1,7 @@ -parameters: - disableComponentGovernance: false - componentGovernanceIgnoreDirectories: '' - steps: -- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: - - script: "echo ##vso[task.setvariable variable=skipComponentGovernanceDetection]true" - displayName: Set skipComponentGovernanceDetection variable -- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: - - task: ComponentGovernanceComponentDetection@0 - continueOnError: true - inputs: - ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} \ No newline at end of file +- template: /eng/common/core-templates/steps/component-governance.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/enable-internal-runtimes.yml b/eng/common/templates-official/steps/enable-internal-runtimes.yml new file mode 100644 index 000000000..f9dd238c6 --- /dev/null +++ 
b/eng/common/templates-official/steps/enable-internal-runtimes.yml @@ -0,0 +1,9 @@ +# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64' +# variable with the base64-encoded SAS token, by default +steps: +- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/enable-internal-sources.yml b/eng/common/templates-official/steps/enable-internal-sources.yml new file mode 100644 index 000000000..e6d571822 --- /dev/null +++ b/eng/common/templates-official/steps/enable-internal-sources.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/enable-internal-sources.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml index 488b560e8..9a89a4706 100644 --- a/eng/common/templates-official/steps/generate-sbom.yml +++ b/eng/common/templates-official/steps/generate-sbom.yml @@ -1,48 +1,7 @@ -# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. -# PackageName - The name of the package this SBOM represents. -# PackageVersion - The version of the package this SBOM represents. -# ManifestDirPath - The path of the directory where the generated manifest files will be placed -# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. - -parameters: - PackageVersion: 7.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' - PackageName: '.NET' - ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom - IgnoreDirectories: '' - sbomContinueOnError: true - steps: -- task: PowerShell@2 - displayName: Prep for SBOM generation in (Non-linux) - condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) - inputs: - filePath: ./eng/common/generate-sbom-prep.ps1 - arguments: ${{parameters.manifestDirPath}} - -# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 -- script: | - chmod +x ./eng/common/generate-sbom-prep.sh - ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} - displayName: Prep for SBOM generation in (Linux) - condition: eq(variables['Agent.Os'], 'Linux') - continueOnError: ${{ parameters.sbomContinueOnError }} - -- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 - displayName: 'Generate SBOM manifest' - continueOnError: ${{ parameters.sbomContinueOnError }} - inputs: - PackageName: ${{ parameters.packageName }} - BuildDropPath: ${{ parameters.buildDropPath }} - PackageVersion: ${{ parameters.packageVersion }} - ManifestDirPath: ${{ parameters.manifestDirPath }} - ${{ if ne(parameters.IgnoreDirectories, '') }}: - AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' - -- task: 1ES.PublishPipelineArtifact@1 - displayName: Publish SBOM manifest - continueOnError: ${{parameters.sbomContinueOnError}} - inputs: - targetPath: '${{parameters.manifestDirPath}}' - artifactName: $(ARTIFACT_NAME) +- template: /eng/common/core-templates/steps/generate-sbom.yml + parameters: + is1ESPipeline: true + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ 
parameter.value }} diff --git a/eng/common/templates-official/steps/get-delegation-sas.yml b/eng/common/templates-official/steps/get-delegation-sas.yml new file mode 100644 index 000000000..c5a9c1f82 --- /dev/null +++ b/eng/common/templates-official/steps/get-delegation-sas.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/get-delegation-sas.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/get-federated-access-token.yml b/eng/common/templates-official/steps/get-federated-access-token.yml new file mode 100644 index 000000000..c8dcf6b81 --- /dev/null +++ b/eng/common/templates-official/steps/get-federated-access-token.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/get-federated-access-token.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/publish-build-artifacts.yml b/eng/common/templates-official/steps/publish-build-artifacts.yml new file mode 100644 index 000000000..100a3fc98 --- /dev/null +++ b/eng/common/templates-official/steps/publish-build-artifacts.yml @@ -0,0 +1,41 @@ +parameters: +- name: displayName + type: string + default: 'Publish to Build Artifact' + +- name: condition + type: string + default: succeeded() + +- name: artifactName + type: string + +- name: pathToPublish + type: string + +- name: continueOnError + type: boolean + default: false + +- name: publishLocation + type: string + default: 'Container' + +- name: is1ESPipeline + type: boolean + default: true + +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error +- task: 1ES.PublishBuildArtifacts@1 + displayName: ${{ parameters.displayName }} + condition: ${{ parameters.condition }} + ${{ if parameters.continueOnError }}: + continueOnError: ${{ parameters.continueOnError }} + inputs: + PublishLocation: ${{ parameters.publishLocation }} + PathtoPublish: ${{ parameters.pathToPublish }} + ${{ if parameters.artifactName }}: + ArtifactName: ${{ parameters.artifactName }} + diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml index 84b2f559c..579fd531e 100644 --- a/eng/common/templates-official/steps/publish-logs.yml +++ b/eng/common/templates-official/steps/publish-logs.yml @@ -1,49 +1,7 @@ -parameters: - StageLabel: '' - JobLabel: '' - CustomSensitiveDataList: '' - # A default - in case value from eng/common/templates-official/post-build/common-variables.yml is not passed - BinlogToolVersion: '1.0.11' - steps: -- task: Powershell@2 - displayName: Prepare Binlogs to Upload - inputs: - targetType: inline - script: | - New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - continueOnError: true - condition: always() - -- task: PowerShell@2 - displayName: Redact Logs - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1 - # For now this needs to have explicit list of all sensitive data. 
Taken from eng/publishing/v3/publish.yml - # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' - # If the file exists - sensitive data for redaction will be sourced from it - # (single entry per line, lines starting with '# ' are considered comments and skipped) - arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs' - -BinlogToolVersion ${{parameters.BinlogToolVersion}} - -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' - '$(publishing-dnceng-devdiv-code-r-build-re)' - '$(MaestroAccessToken)' - '$(dn-bot-all-orgs-artifact-feeds-rw)' - '$(akams-client-id)' - '$(akams-client-secret)' - '$(microsoft-symbol-server-pat)' - '$(symweb-symbol-server-pat)' - '$(dn-bot-all-orgs-build-rw-code-rw)' - ${{parameters.CustomSensitiveDataList}} - continueOnError: true - condition: always() - -- task: 1ES.PublishBuildArtifacts@1 - displayName: Publish Logs - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs' - PublishLocation: Container - ArtifactName: PostBuildLogs - continueOnError: true - condition: always() +- template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/publish-pipeline-artifacts.yml b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml new file mode 100644 index 000000000..172f9f0fd --- /dev/null +++ b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml @@ -0,0 +1,28 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: true + +- name: args + type: object + default: {} + +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error +- task: 1ES.PublishPipelineArtifact@1 + displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }} + ${{ if parameters.args.condition }}: + condition: ${{ parameters.args.condition }} + ${{ else }}: + condition: succeeded() + ${{ if parameters.args.continueOnError }}: + continueOnError: ${{ parameters.args.continueOnError }} + inputs: + targetPath: ${{ parameters.args.targetPath }} + ${{ if parameters.args.artifactName }}: + artifactName: ${{ parameters.args.artifactName }} + ${{ if parameters.args.properties }}: + properties: ${{ parameters.args.properties }} + ${{ if parameters.args.sbomEnabled }}: + sbomEnabled: ${{ parameters.args.sbomEnabled }} diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml index 83d97a26a..559455150 100644 --- a/eng/common/templates-official/steps/retain-build.yml +++ b/eng/common/templates-official/steps/retain-build.yml @@ -1,28 +1,7 @@ -parameters: - # Optional azure devops PAT with build execute permissions for the build's organization, - # only needed if the build that should be retained ran on a different organization than - # the pipeline where this template is executing from - Token: '' - # Optional BuildId to retain, defaults to the current running build - BuildId: '' - # Azure devops Organization URI for the build in the https://dev.azure.com/ format. - # Defaults to the organization the current pipeline is running on - AzdoOrgUri: '$(System.CollectionUri)' - # Azure devops project for the build. 
Defaults to the project the current pipeline is running on - AzdoProject: '$(System.TeamProject)' - steps: - - task: powershell@2 - inputs: - targetType: 'filePath' - filePath: eng/common/retain-build.ps1 - pwsh: true - arguments: > - -AzdoOrgUri: ${{parameters.AzdoOrgUri}} - -AzdoProject ${{parameters.AzdoProject}} - -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }} - -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}} - displayName: Enable permanent build retention - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - BUILD_ID: $(Build.BuildId) \ No newline at end of file +- template: /eng/common/core-templates/steps/retain-build.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml index 68fa739c4..6500f21bf 100644 --- a/eng/common/templates-official/steps/send-to-helix.yml +++ b/eng/common/templates-official/steps/send-to-helix.yml @@ -1,93 +1,7 @@ -# Please remember to update the documentation if you make changes to these parameters! -parameters: - HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ - HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' - HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number - HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues - HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group - HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY - HelixProjectArguments: '' # optional -- arguments passed to the build command - HelixConfiguration: '' # optional -- additional property attached to a job - HelixPreCommands: '' # optional -- commands to run before Helix work item execution - HelixPostCommands: '' # optional -- commands to run after Helix work item execution - WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects - WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects - WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 
00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects - CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload - XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true - XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects - XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects - XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner - XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects - IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion - DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json - DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json - WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." - IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set - HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) - Creator: '' # optional -- if the build is external, use this to specify who is sending the job - DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO - condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() - continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false - steps: - - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' - displayName: ${{ parameters.DisplayNamePrefix }} (Windows) - env: - BuildConfig: $(_BuildConfig) - HelixSource: ${{ parameters.HelixSource }} - HelixType: ${{ parameters.HelixType }} - HelixBuild: ${{ parameters.HelixBuild }} - HelixConfiguration: ${{ parameters.HelixConfiguration }} - HelixTargetQueues: ${{ parameters.HelixTargetQueues }} - HelixAccessToken: ${{ parameters.HelixAccessToken }} - HelixPreCommands: ${{ parameters.HelixPreCommands }} - HelixPostCommands: ${{ parameters.HelixPostCommands }} - WorkItemDirectory: ${{ parameters.WorkItemDirectory }} - WorkItemCommand: ${{ parameters.WorkItemCommand }} - WorkItemTimeout: ${{ parameters.WorkItemTimeout }} - CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} - XUnitProjects: ${{ parameters.XUnitProjects }} - XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} - 
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} - XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} - XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} - IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} - DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} - DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} - Creator: ${{ parameters.Creator }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog - displayName: ${{ parameters.DisplayNamePrefix }} (Unix) - env: - BuildConfig: $(_BuildConfig) - HelixSource: ${{ parameters.HelixSource }} - HelixType: ${{ parameters.HelixType }} - HelixBuild: ${{ parameters.HelixBuild }} - HelixConfiguration: ${{ parameters.HelixConfiguration }} - HelixTargetQueues: ${{ parameters.HelixTargetQueues }} - HelixAccessToken: ${{ parameters.HelixAccessToken }} - HelixPreCommands: ${{ parameters.HelixPreCommands }} - HelixPostCommands: ${{ parameters.HelixPostCommands }} - WorkItemDirectory: ${{ parameters.WorkItemDirectory }} - WorkItemCommand: ${{ parameters.WorkItemCommand }} - WorkItemTimeout: ${{ parameters.WorkItemTimeout }} - CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} - XUnitProjects: ${{ parameters.XUnitProjects }} - XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} - XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} - XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} - XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} - IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} - DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} - DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} - Creator: ${{ parameters.Creator }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} +- template: /eng/common/core-templates/steps/send-to-helix.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml index 53ed57b6d..8f92c49e7 100644 --- a/eng/common/templates-official/steps/source-build.yml +++ b/eng/common/templates-official/steps/source-build.yml @@ -1,131 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. - - # This is a 'steps' template, and is intended for advanced scenarios where the existing build - # infra has a careful build methodology that must be followed. For example, a repo - # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline - # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to - # GitHub. 
Using this steps template leaves room for that infra to be included. - - # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml' - # for details. The entire object is described in the 'job' template for simplicity, even though - # the usage of the properties on this object is split between the 'job' and 'steps' templates. - platform: {} - steps: -# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.) -- script: | - set -x - df -h - - # If building on the internal project, the artifact feeds variable may be available (usually only if needed) - # In that case, call the feed setup script to add internal feeds corresponding to public ones. - # In addition, add an msbuild argument to copy the WIP from the repo to the target build location. - # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those - # changes. - internalRestoreArgs= - if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then - # Temporarily work around https://github.com/dotnet/arcade/issues/7709 - chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh - $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw) - internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' - - # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. - # This only works if there is a username/email configured, which won't be the case in most CI runs. - git config --get user.email - if [ $? -ne 0 ]; then - git config user.email dn-bot@microsoft.com - git config user.name dn-bot - fi - fi - - # If building on the internal project, the internal storage variable may be available (usually only if needed) - # In that case, add variables to allow the download of internal runtimes if the specified versions are not found - # in the default public locations. - internalRuntimeDownloadArgs= - if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then - internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' - fi - - buildConfig=Release - # Check if AzDO substitutes in a build config from a variable, and use it if so. 
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then - buildConfig='$(_BuildConfig)' - fi - - officialBuildArgs= - if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then - officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' - fi - - targetRidArgs= - if [ '${{ parameters.platform.targetRID }}' != '' ]; then - targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' - fi - - runtimeOsArgs= - if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then - runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' - fi - - baseOsArgs= - if [ '${{ parameters.platform.baseOS }}' != '' ]; then - baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' - fi - - publishArgs= - if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then - publishArgs='--publish' - fi - - assetManifestFileName=SourceBuild_RidSpecific.xml - if [ '${{ parameters.platform.name }}' != '' ]; then - assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml - fi - - ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ - --configuration $buildConfig \ - --restore --build --pack $publishArgs -bl \ - $officialBuildArgs \ - $internalRuntimeDownloadArgs \ - $internalRestoreArgs \ - $targetRidArgs \ - $runtimeOsArgs \ - $baseOsArgs \ - /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ - /p:ArcadeBuildFromSource=true \ - /p:DotNetBuildSourceOnly=true \ - /p:DotNetBuildRepo=true \ - /p:AssetManifestFileName=$assetManifestFileName - displayName: Build - -# Upload build logs for diagnosis. -- task: CopyFiles@2 - displayName: Prepare BuildLogs staging directory - inputs: - SourceFolder: '$(Build.SourcesDirectory)' - Contents: | - **/*.log - **/*.binlog - artifacts/sb/prebuilt-report/** - TargetFolder: '$(Build.StagingDirectory)/BuildLogs' - CleanTargetFolder: true - continueOnError: true - condition: succeededOrFailed() - -- task: 1ES.PublishPipelineArtifact@1 - displayName: Publish BuildLogs - inputs: - targetPath: '$(Build.StagingDirectory)/BuildLogs' - artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) - continueOnError: true - condition: succeededOrFailed() +- template: /eng/common/core-templates/steps/source-build.yml + parameters: + is1ESPipeline: true -# Manually inject component detection so that we can ignore the source build upstream cache, which contains -# a nupkg cache of input packages (a local feed). 
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir' -# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets -- task: ComponentGovernanceComponentDetection@0 - displayName: Component Detection (Exclude upstream cache) - inputs: - ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/source-index-stage1-publish.yml b/eng/common/templates-official/steps/source-index-stage1-publish.yml new file mode 100644 index 000000000..9b8b80942 --- /dev/null +++ b/eng/common/templates-official/steps/source-index-stage1-publish.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/variables/pool-providers.yml b/eng/common/templates-official/variables/pool-providers.yml index beab7d1bf..1f308b24e 100644 --- a/eng/common/templates-official/variables/pool-providers.yml +++ b/eng/common/templates-official/variables/pool-providers.yml @@ -23,7 +23,7 @@ # # pool: # name: $(DncEngInternalBuildPool) -# image: 1es-windows-2022-pt +# image: 1es-windows-2022 variables: # Coalesce the target and source branches so we know when a PR targets a release branch diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml deleted file mode 100644 index 7870f93bc..000000000 --- a/eng/common/templates/job/execute-sdl.yml +++ /dev/null @@ -1,139 +0,0 @@ -parameters: - enable: 'false' # Whether the SDL validation job should execute or not - overrideParameters: '' # Optional: to override values for parameters. - additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")' - # Optional: if specified, restore and use this version of Guardian instead of the default. - overrideGuardianVersion: '' - # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth - # diagnosis of problems with specific tool configurations. - publishGuardianDirectoryToPipeline: false - # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL - # parameters rather than relying on YAML. It may be better to use a local script, because you can - # reproduce results locally without piecing together a command based on the YAML. - executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1' - # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named - # 'continueOnError', the parameter value is not correctly picked up. - # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter - sdlContinueOnError: false # optional: determines whether to continue the build if the step errors; - # optional: determines if build artifacts should be downloaded. - downloadArtifacts: true - # optional: determines if this job should search the directory of downloaded artifacts for - # 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks. 
- extractArchiveArtifacts: false - dependsOn: '' # Optional: dependencies of the job - artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts - # Usage: - # artifactNames: - # - 'BlobArtifacts' - # - 'Artifacts_Windows_NT_Release' - # Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts, - # not pipeline artifacts, so doesn't affect the use of this parameter. - pipelineArtifactNames: [] - -jobs: -- job: Run_SDL - dependsOn: ${{ parameters.dependsOn }} - displayName: Run SDL tool - condition: and(succeededOrFailed(), eq( ${{ parameters.enable }}, 'true')) - variables: - - group: DotNet-VSTS-Bot - - name: AzDOProjectName - value: ${{ parameters.AzDOProjectName }} - - name: AzDOPipelineId - value: ${{ parameters.AzDOPipelineId }} - - name: AzDOBuildId - value: ${{ parameters.AzDOBuildId }} - - template: /eng/common/templates/variables/sdl-variables.yml - - name: GuardianVersion - value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} - - template: /eng/common/templates/variables/pool-providers.yml - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64 - steps: - - checkout: self - clean: true - - # If the template caller didn't provide an AzDO parameter, set them all up as Maestro vars. - - ${{ if not(and(parameters.AzDOProjectName, parameters.AzDOPipelineId, parameters.AzDOBuildId)) }}: - - template: /eng/common/templates/post-build/setup-maestro-vars.yml - - - ${{ if ne(parameters.downloadArtifacts, 'false')}}: - - ${{ if ne(parameters.artifactNames, '') }}: - - ${{ each artifactName in parameters.artifactNames }}: - - task: DownloadBuildArtifacts@0 - displayName: Download Build Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: ${{ artifactName }} - downloadPath: $(Build.ArtifactStagingDirectory)\artifacts - checkDownloadedFiles: true - - ${{ if eq(parameters.artifactNames, '') }}: - - task: DownloadBuildArtifacts@0 - displayName: Download Build Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - downloadType: specific files - itemPattern: "**" - downloadPath: $(Build.ArtifactStagingDirectory)\artifacts - checkDownloadedFiles: true - - - ${{ each artifactName in parameters.pipelineArtifactNames }}: - - task: DownloadPipelineArtifact@2 - displayName: Download Pipeline Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: ${{ artifactName }} - downloadPath: $(Build.ArtifactStagingDirectory)\artifacts - checkDownloadedFiles: true - - - powershell: eng/common/sdl/trim-assets-version.ps1 - -InputPath $(Build.ArtifactStagingDirectory)\artifacts - displayName: Trim the version from the NuGet packages - continueOnError: ${{ parameters.sdlContinueOnError }} - - - powershell: eng/common/sdl/extract-artifact-packages.ps1 - -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts - -ExtractPath 
$(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts - displayName: Extract Blob Artifacts - continueOnError: ${{ parameters.sdlContinueOnError }} - - - powershell: eng/common/sdl/extract-artifact-packages.ps1 - -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts - -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts - displayName: Extract Package Artifacts - continueOnError: ${{ parameters.sdlContinueOnError }} - - - ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}: - - powershell: eng/common/sdl/extract-artifact-archives.ps1 - -InputPath $(Build.ArtifactStagingDirectory)\artifacts - -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts - displayName: Extract Archive Artifacts - continueOnError: ${{ parameters.sdlContinueOnError }} - - - template: /eng/common/templates/steps/execute-sdl.yml - parameters: - overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} - executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} - overrideParameters: ${{ parameters.overrideParameters }} - additionalParameters: ${{ parameters.additionalParameters }} - publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} - sdlContinueOnError: ${{ parameters.sdlContinueOnError }} diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml index a3277bf15..d1aeb92fc 100644 --- a/eng/common/templates/job/job.yml +++ b/eng/common/templates/job/job.yml @@ -1,259 +1,82 @@ -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. - -parameters: -# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - cancelTimeoutInMinutes: '' - condition: '' - container: '' - continueOnError: false - dependsOn: '' - displayName: '' - pool: '' - steps: [] - strategy: '' - timeoutInMinutes: '' - variables: [] - workspace: '' - templateContext: '' - -# Job base template specific parameters - # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md - artifacts: '' - enableMicrobuild: false +parameters: enablePublishBuildArtifacts: false - enablePublishBuildAssets: false - enablePublishTestResults: false - enablePublishUsingPipelines: false - enableBuildRetry: false disableComponentGovernance: '' componentGovernanceIgnoreDirectories: '' - mergeTestResults: false - testRunTitle: '' - testResultsFormat: '' - name: '' - preSteps: [] - runAsPublic: false # Sbom related params enableSbom: true - PackageVersion: 7.0.0 + runAsPublic: false + PackageVersion: 9.0.0 BuildDropPath: '$(Build.SourcesDirectory)/artifacts' jobs: -- job: ${{ parameters.name }} - - ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: - cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} - - ${{ if ne(parameters.condition, '') }}: - condition: ${{ parameters.condition }} - - ${{ if ne(parameters.container, '') }}: - container: ${{ parameters.container }} - - ${{ if ne(parameters.continueOnError, '') }}: - continueOnError: ${{ parameters.continueOnError }} - - ${{ if ne(parameters.dependsOn, '') }}: - dependsOn: ${{ parameters.dependsOn }} - - ${{ if ne(parameters.displayName, '') }}: - displayName: ${{ parameters.displayName }} - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - - ${{ if ne(parameters.strategy, '') }}: - strategy: ${{ parameters.strategy }} - - ${{ if 
ne(parameters.timeoutInMinutes, '') }}: - timeoutInMinutes: ${{ parameters.timeoutInMinutes }} - - ${{ if ne(parameters.templateContext, '') }}: - templateContext: ${{ parameters.templateContext }} - - variables: - - ${{ if ne(parameters.enableTelemetry, 'false') }}: - - name: DOTNET_CLI_TELEMETRY_PROFILE - value: '$(Build.Repository.Uri)' - - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}: - - name: EnableRichCodeNavigation - value: 'true' - # Retry signature validation up to three times, waiting 2 seconds between attempts. - # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures - - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY - value: 3,2000 - - ${{ each variable in parameters.variables }}: - # handle name-value variable syntax - # example: - # - name: [key] - # value: [value] - - ${{ if ne(variable.name, '') }}: - - name: ${{ variable.name }} - value: ${{ variable.value }} - - # handle variable groups - - ${{ if ne(variable.group, '') }}: - - group: ${{ variable.group }} +- template: /eng/common/core-templates/job/job.yml + parameters: + is1ESPipeline: false - # handle template variable syntax - # example: - # - template: path/to/template.yml - # parameters: - # [key]: [value] - - ${{ if ne(variable.template, '') }}: - - template: ${{ variable.template }} - ${{ if ne(variable.parameters, '') }}: - parameters: ${{ variable.parameters }} + ${{ each parameter in parameters }}: + ${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}: + ${{ parameter.key }}: ${{ parameter.value }} - # handle key-value variable syntax. - # example: - # - [key]: [value] - - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: - - ${{ each pair in variable }}: - - name: ${{ pair.key }} - value: ${{ pair.value }} + steps: + - ${{ each step in parameters.steps }}: + - ${{ step }} - # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds - - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: DotNet-HelixApi-Access - - ${{ if ne(parameters.workspace, '') }}: - workspace: ${{ parameters.workspace }} - - steps: - - ${{ if ne(parameters.preSteps, '') }}: - - ${{ each preStep in parameters.preSteps }}: - - ${{ preStep }} - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - task: MicroBuildSigningPlugin@3 - displayName: Install MicroBuild plugin - inputs: - signType: $(_SignType) - zipSources: false - feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - env: - TeamName: $(_TeamName) - continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - - - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: - - task: NuGetAuthenticate@1 - - - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: - - task: DownloadPipelineArtifact@2 - inputs: - buildType: current - artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} - targetPath: ${{ coalesce(parameters.artifacts.download.path, 
'artifacts') }} - itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} - - - ${{ each step in parameters.steps }}: - - ${{ step }} - - - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: - - task: RichCodeNavIndexer@0 - displayName: RichCodeNav Upload - inputs: - languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} - environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }} - richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin - uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} - continueOnError: true - - - template: /eng/common/templates/steps/component-governance.yml - parameters: - ${{ if eq(parameters.disableComponentGovernance, '') }}: - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: - disableComponentGovernance: false - ${{ else }}: - disableComponentGovernance: true - ${{ else }}: - disableComponentGovernance: ${{ parameters.disableComponentGovernance }} - componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} - - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: MicroBuildCleanup@1 - displayName: Execute Microbuild cleanup tasks - condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - env: - TeamName: $(_TeamName) - - - ${{ if ne(parameters.artifacts.publish, '') }}: - - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: - - task: CopyFiles@2 - displayName: Gather binaries for publish to artifacts - inputs: - SourceFolder: 'artifacts/bin' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' - - task: CopyFiles@2 - displayName: Gather packages for publish to artifacts - inputs: - SourceFolder: 'artifacts/packages' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' - - task: PublishBuildArtifacts@1 - displayName: Publish pipeline artifacts - inputs: - PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' - PublishLocation: Container - ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} - continueOnError: true - condition: always() - - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: - - publish: artifacts/log - artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} - displayName: Publish logs - continueOnError: true - condition: always() - - - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: - - task: PublishBuildArtifacts@1 - displayName: Publish Logs - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' - PublishLocation: Container - ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, 
'$(Agent.Os)_$(Agent.JobName)' ) }} - continueOnError: true - condition: always() - - - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: - - task: PublishTestResults@2 - displayName: Publish XUnit Test Results - inputs: - testResultsFormat: 'xUnit' - testResultsFiles: '*.xml' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' - testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit - mergeTestResults: ${{ parameters.mergeTestResults }} - continueOnError: true - condition: always() - - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}: - - task: PublishTestResults@2 - displayName: Publish TRX Test Results - inputs: - testResultsFormat: 'VSTest' - testResultsFiles: '*.trx' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' - testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx - mergeTestResults: ${{ parameters.mergeTestResults }} - continueOnError: true - condition: always() - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: - - template: /eng/common/templates/steps/generate-sbom.yml + componentGovernanceSteps: + - template: /eng/common/templates/steps/component-governance.yml parameters: - PackageVersion: ${{ parameters.packageVersion}} - BuildDropPath: ${{ parameters.buildDropPath }} - IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} - - - ${{ if eq(parameters.enableBuildRetry, 'true') }}: - - publish: $(Build.SourcesDirectory)\eng\common\BuildConfiguration - artifact: BuildConfiguration - displayName: Publish build retry configuration - continueOnError: true + ${{ if eq(parameters.disableComponentGovernance, '') }}: + ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: + disableComponentGovernance: false + ${{ else }}: + disableComponentGovernance: true + ${{ else }}: + disableComponentGovernance: ${{ parameters.disableComponentGovernance }} + componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + artifactPublishSteps: + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: false + args: + displayName: Publish pipeline artifacts + pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts' + publishLocation: Container + artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} + continueOnError: true + condition: always() + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml + 
parameters: + is1ESPipeline: false + args: + targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log' + artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} + displayName: 'Publish logs' + continueOnError: true + condition: always() + sbomEnabled: false # we don't need SBOM for logs + + - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: false + args: + displayName: Publish Logs + pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' + publishLocation: Container + artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} + continueOnError: true + condition: always() + + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml + parameters: + is1ESPipeline: false + args: + targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration' + artifactName: 'BuildConfiguration' + displayName: 'Publish build retry configuration' + continueOnError: true + sbomEnabled: false # we don't need SBOM for BuildConfiguration diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml index 60ab00c4d..ff829dc4c 100644 --- a/eng/common/templates/job/onelocbuild.yml +++ b/eng/common/templates/job/onelocbuild.yml @@ -1,109 +1,7 @@ -parameters: - # Optional: dependencies of the job - dependsOn: '' - - # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool - pool: '' - - CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex - GithubPat: $(BotAccount-dotnet-bot-repo-PAT) - - SourcesDirectory: $(Build.SourcesDirectory) - CreatePr: true - AutoCompletePr: false - ReusePr: true - UseLfLineEndings: true - UseCheckedInLocProjectJson: false - SkipLocProjectJsonGeneration: false - LanguageSet: VS_Main_Languages - LclSource: lclFilesInRepo - LclPackageId: '' - RepoType: gitHub - GitHubOrg: dotnet - MirrorRepo: '' - MirrorBranch: main - condition: '' - JobNameSuffix: '' - jobs: -- job: OneLocBuild${{ parameters.JobNameSuffix }} - - dependsOn: ${{ parameters.dependsOn }} - - displayName: OneLocBuild${{ parameters.JobNameSuffix }} - - variables: - - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat - - name: _GenerateLocProjectArguments - value: -SourcesDirectory ${{ parameters.SourcesDirectory }} - -LanguageSet "${{ parameters.LanguageSet }}" - -CreateNeutralXlfs - - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: - - name: _GenerateLocProjectArguments - value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson - - template: /eng/common/templates/variables/pool-providers.yml - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - ${{ if eq(parameters.pool, '') }}: - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64 - - steps: - - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: - - task: Powershell@2 - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 - arguments: $(_GenerateLocProjectArguments) - displayName: Generate LocProject.json - condition: ${{ parameters.condition }} - - - task: OneLocBuild@2 - displayName: OneLocBuild - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - inputs: - locProj: eng/Localize/LocProject.json - outDir: $(Build.ArtifactStagingDirectory) - lclSource: ${{ parameters.LclSource }} - lclPackageId: ${{ parameters.LclPackageId }} - isCreatePrSelected: ${{ parameters.CreatePr }} - isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} - ${{ if eq(parameters.CreatePr, true) }}: - isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} - ${{ if eq(parameters.RepoType, 'gitHub') }}: - isShouldReusePrSelected: ${{ parameters.ReusePr }} - packageSourceAuth: patAuth - patVariable: ${{ parameters.CeapexPat }} - ${{ if eq(parameters.RepoType, 'gitHub') }}: - repoType: ${{ parameters.RepoType }} - gitHubPatVariable: "${{ parameters.GithubPat }}" - ${{ if ne(parameters.MirrorRepo, '') }}: - isMirrorRepoSelected: true - gitHubOrganization: ${{ parameters.GitHubOrg }} - mirrorRepo: ${{ parameters.MirrorRepo }} - mirrorBranch: ${{ parameters.MirrorBranch }} - condition: ${{ parameters.condition }} - - - task: PublishBuildArtifacts@1 - displayName: Publish Localization Files - inputs: - PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc' - PublishLocation: Container - ArtifactName: Loc - condition: ${{ parameters.condition }} +- template: /eng/common/core-templates/job/onelocbuild.yml + parameters: + is1ESPipeline: false - - task: PublishBuildArtifacts@1 - displayName: Publish LocProject.json - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/' - PublishLocation: Container - ArtifactName: Loc - condition: ${{ parameters.condition }} \ No newline at end of file + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml index bb42240f8..ab2edec2a 100644 --- a/eng/common/templates/job/publish-build-assets.yml +++ b/eng/common/templates/job/publish-build-assets.yml @@ -1,155 +1,7 @@ -parameters: - configuration: 'Debug' - - # Optional: condition for the job to run - condition: '' - - # Optional: 'true' if future jobs should run even if this job fails - continueOnError: false - - # Optional: dependencies of the job - dependsOn: '' - - # Optional: Include PublishBuildArtifacts task - enablePublishBuildArtifacts: false - - # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool - pool: {} - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
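The pattern visible above for onelocbuild.yml repeats throughout this patch: each legacy template under eng/common/templates becomes a thin shim that sets is1ESPipeline: false and forwards every caller-supplied parameter to the matching template under eng/common/core-templates, so existing pipelines keep referencing the old paths unchanged. A minimal sketch of such a caller, assuming illustrative values for MirrorRepo and LclPackageId (neither value comes from this patch):

  # hypothetical repo pipeline fragment; the template path is real, the values are examples
  jobs:
  - template: /eng/common/templates/job/onelocbuild.yml
    parameters:
      MirrorRepo: example-repo        # assumption: example value
      LclPackageId: 'LCL-EXAMPLE'     # assumption: example value
      condition: eq(variables['Build.SourceBranch'], 'refs/heads/main')

Because the shim expands ${{ each parameter in parameters }}, these keys reach /eng/common/core-templates/job/onelocbuild.yml verbatim, alongside the injected is1ESPipeline: false.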
- runAsPublic: false - - # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing - publishUsingPipelines: false - - # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing - publishAssetsImmediately: false - - artifactsPublishingAdditionalParameters: '' - - signingValidationAdditionalParameters: '' - jobs: -- job: Asset_Registry_Publish - - dependsOn: ${{ parameters.dependsOn }} - timeoutInMinutes: 150 - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - displayName: Publish Assets - ${{ else }}: - displayName: Publish to Build Asset Registry - - variables: - - template: /eng/common/templates/variables/pool-providers.yml - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: Publish-Build-Assets - - group: AzureDevOps-Artifact-Feeds-Pats - - name: runCodesignValidationInjection - value: false - # unconditional - needed for logs publishing (redactor tool version) - - template: /eng/common/templates/post-build/common-variables.yml - - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: NetCore1ESPool-Publishing-Internal - demands: ImageOverride -equals windows.vs2019.amd64 - - steps: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - checkout: self - fetchDepth: 3 - clean: true - - - task: DownloadBuildArtifacts@0 - displayName: Download artifact - inputs: - artifactName: AssetManifests - downloadPath: '$(Build.StagingDirectory)/Download' - checkDownloadedFiles: true - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - - task: NuGetAuthenticate@1 - - - task: PowerShell@2 - displayName: Publish Build Assets - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet - /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' - /p:BuildAssetRegistryToken=$(MaestroAccessToken) - /p:MaestroApiEndpoint=https://maestro.dot.net - /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} - /p:OfficialBuildId=$(Build.BuildNumber) - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - - task: powershell@2 - displayName: Create ReleaseConfigs Artifact - inputs: - targetType: inline - script: | - Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId) - Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)" - Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild) - - - task: PublishBuildArtifacts@1 - displayName: Publish ReleaseConfigs Artifact - inputs: - PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt' - PublishLocation: Container - ArtifactName: ReleaseConfigs - - - task: powershell@2 - displayName: Check if SymbolPublishingExclusionsFile.txt exists - inputs: - targetType: inline - script: | - $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" - if(Test-Path -Path $symbolExclusionfile) - { - Write-Host 
"SymbolExclusionFile exists" - Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true" - } - else{ - Write-Host "Symbols Exclusion file does not exists" - Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false" - } - - - task: PublishBuildArtifacts@1 - displayName: Publish SymbolPublishingExclusionsFile Artifact - condition: eq(variables['SymbolExclusionFile'], 'true') - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' - PublishLocation: Container - ArtifactName: ReleaseConfigs - - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - - template: /eng/common/templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: PowerShell@2 - displayName: Publish Using Darc - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) - -PublishingInfraVersion 3 - -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' - -MaestroToken '$(MaestroApiAccessToken)' - -WaitPublishingFinish true - -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' - -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' +- template: /eng/common/core-templates/job/publish-build-assets.yml + parameters: + is1ESPipeline: false - - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - - template: /eng/common/templates/steps/publish-logs.yml - parameters: - JobLabel: 'Publish_Artifacts_Logs' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml index 8a3deef2b..e44d47b1d 100644 --- a/eng/common/templates/job/source-build.yml +++ b/eng/common/templates/job/source-build.yml @@ -1,66 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. The template produces a server job with a - # default ID 'Source_Build_Complete' to put in a dependency list if necessary. - - # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. - jobNamePrefix: 'Source_Build' - - # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for - # managed-only repositories. This is an object with these properties: - # - # name: '' - # The name of the job. This is included in the job ID. - # targetRID: '' - # The name of the target RID to use, instead of the one auto-detected by Arcade. - # nonPortable: false - # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than - # linux-x64), and compiling against distro-provided packages rather than portable ones. - # skipPublishValidation: false - # Disables publishing validation. By default, a check is performed to ensure no packages are - # published by source-build. - # container: '' - # A container to use. Runs in docker. - # pool: {} - # A pool to use. Runs directly on an agent. - # buildScript: '' - # Specifies the build script to invoke to perform the build in the repo. The default - # './build.sh' should work for typical Arcade repositories, but this is customizable for - # difficult situations. - # jobProperties: {} - # A list of job properties to inject at the top level, for potential extensibility beyond - # container and pool. 
- platform: {} - jobs: -- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} - displayName: Source-Build (${{ parameters.platform.name }}) - - ${{ each property in parameters.platform.jobProperties }}: - ${{ property.key }}: ${{ property.value }} - - ${{ if ne(parameters.platform.container, '') }}: - container: ${{ parameters.platform.container }} - - ${{ if eq(parameters.platform.pool, '') }}: - # The default VM host AzDO pool. This should be capable of running Docker containers: almost all - # source-build builds run in Docker, including the default managed platform. - # /eng/common/templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic - pool: - ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] - demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] - demands: ImageOverride -equals Build.Ubuntu.1804.Amd64 - - ${{ if ne(parameters.platform.pool, '') }}: - pool: ${{ parameters.platform.pool }} - - workspace: - clean: all +- template: /eng/common/core-templates/job/source-build.yml + parameters: + is1ESPipeline: false - steps: - - template: /eng/common/templates/steps/source-build.yml - parameters: - platform: ${{ parameters.platform }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml index b5a3e5c4a..89f329159 100644 --- a/eng/common/templates/job/source-index-stage1.yml +++ b/eng/common/templates/job/source-index-stage1.yml @@ -1,67 +1,7 @@ -parameters: - runAsPublic: false - sourceIndexPackageVersion: 1.0.1-20240129.2 - sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json - sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" - preSteps: [] - binlogPath: artifacts/log/Debug/Build.binlog - condition: '' - dependsOn: '' - pool: '' - jobs: -- job: SourceIndexStage1 - dependsOn: ${{ parameters.dependsOn }} - condition: ${{ parameters.condition }} - variables: - - name: SourceIndexPackageVersion - value: ${{ parameters.sourceIndexPackageVersion }} - - name: SourceIndexPackageSource - value: ${{ parameters.sourceIndexPackageSource }} - - name: BinlogPath - value: ${{ parameters.binlogPath }} - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: source-dot-net stage1 variables - - template: /eng/common/templates/variables/pool-providers.yml - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - ${{ if eq(parameters.pool, '') }}: - pool: - ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: $(DncEngPublicBuildPool) - demands: ImageOverride -equals windows.vs2022.amd64.open - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - name: 
$(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2022.amd64 - - steps: - - ${{ each preStep in parameters.preSteps }}: - - ${{ preStep }} - - - task: UseDotNet@2 - displayName: Use .NET 8 SDK - inputs: - packageType: sdk - version: 8.0.x - installationPath: $(Agent.TempDirectory)/dotnet - workingDirectory: $(Agent.TempDirectory) - - - script: | - $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools - $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools - displayName: Download Tools - # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. - workingDirectory: $(Agent.TempDirectory) - - - script: ${{ parameters.sourceIndexBuildCommand }} - displayName: Build Repository - - - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output - displayName: Process Binlog into indexable sln +- template: /eng/common/core-templates/job/source-index-stage1.yml + parameters: + is1ESPipeline: false - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) - displayName: Upload stage1 artifacts to source index - env: - BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url) + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/jobs/codeql-build.yml b/eng/common/templates/jobs/codeql-build.yml index f7dc5ea4a..517f24d6a 100644 --- a/eng/common/templates/jobs/codeql-build.yml +++ b/eng/common/templates/jobs/codeql-build.yml @@ -1,31 +1,7 @@ -parameters: - # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md - continueOnError: false - # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - jobs: [] - # Optional: if specified, restore and use this version of Guardian instead of the default. - overrideGuardianVersion: '' - jobs: -- template: /eng/common/templates/jobs/jobs.yml +- template: /eng/common/core-templates/jobs/codeql-build.yml parameters: - enableMicrobuild: false - enablePublishBuildArtifacts: false - enablePublishTestResults: false - enablePublishBuildAssets: false - enablePublishUsingPipelines: false - enableTelemetry: true + is1ESPipeline: false - variables: - - group: Publish-Build-Assets - # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in - # sync with the packages.config file. 
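For codeql-build.yml, the only required input in the removed parameter block is the jobs collection, and that contract is unchanged by the shim. A minimal sketch of a caller, assuming an illustrative job name, pool, and build command:

  # hypothetical CodeQL pipeline fragment; only the template path is taken from this patch
  jobs:
  - template: /eng/common/templates/jobs/codeql-build.yml
    parameters:
      jobs:
      - job: Build_For_CodeQL                        # assumption: example job name
        pool:
          vmImage: windows-latest                    # assumption: example pool
        steps:
        - script: ./build.cmd -configuration Release # assumption: repo build entry point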
- - name: DefaultGuardianVersion - value: 0.109.0 - - name: GuardianPackagesConfigFile - value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config - - name: GuardianVersion - value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} - - jobs: ${{ parameters.jobs }} - + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml index 289bb2396..388e9037b 100644 --- a/eng/common/templates/jobs/jobs.yml +++ b/eng/common/templates/jobs/jobs.yml @@ -1,97 +1,7 @@ -parameters: - # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md - continueOnError: false - - # Optional: Include PublishBuildArtifacts task - enablePublishBuildArtifacts: false - - # Optional: Enable publishing using release pipelines - enablePublishUsingPipelines: false - - # Optional: Enable running the source-build jobs to build repo from source - enableSourceBuild: false - - # Optional: Parameters for source-build template. - # See /eng/common/templates/jobs/source-build.yml for options - sourceBuildParameters: [] - - graphFileGeneration: - # Optional: Enable generating the graph files at the end of the build - enabled: false - # Optional: Include toolset dependencies in the generated graph files - includeToolset: false - - # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - jobs: [] - - # Optional: Override automatically derived dependsOn value for "publish build assets" job - publishBuildAssetsDependsOn: '' - - # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage. - publishAssetsImmediately: false - - # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) - artifactsPublishingAdditionalParameters: '' - signingValidationAdditionalParameters: '' - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. - runAsPublic: false - - enableSourceIndex: false - sourceIndexParams: {} - -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. 
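The jobs.yml parameters being removed here (enableSourceBuild, enablePublishBuildArtifacts, enablePublishUsingPipelines, runAsPublic, the jobs collection, and so on) remain the surface a repo interacts with; they now flow through to core-templates. A sketch of a typical caller, with an assumed pool and build script:

  # hypothetical repo pipeline fragment calling the jobs entry point
  stages:
  - stage: build
    jobs:
    - template: /eng/common/templates/jobs/jobs.yml
      parameters:
        enablePublishBuildArtifacts: true
        enablePublishUsingPipelines: true
        enableSourceBuild: true
        jobs:
        - job: Windows_NT              # assumption: example job name
          pool:
            vmImage: windows-latest    # assumption: example pool
          steps:
          - script: ./build.cmd -ci    # assumption: repo build entry point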
- jobs: -- ${{ each job in parameters.jobs }}: - - template: ../job/job.yml - parameters: - # pass along parameters - ${{ each parameter in parameters }}: - ${{ if ne(parameter.key, 'jobs') }}: - ${{ parameter.key }}: ${{ parameter.value }} - - # pass along job properties - ${{ each property in job }}: - ${{ if ne(property.key, 'job') }}: - ${{ property.key }}: ${{ property.value }} - - name: ${{ job.job }} - -- ${{ if eq(parameters.enableSourceBuild, true) }}: - - template: /eng/common/templates/jobs/source-build.yml - parameters: - allCompletedJobId: Source_Build_Complete - ${{ each parameter in parameters.sourceBuildParameters }}: - ${{ parameter.key }}: ${{ parameter.value }} - -- ${{ if eq(parameters.enableSourceIndex, 'true') }}: - - template: ../job/source-index-stage1.yml - parameters: - runAsPublic: ${{ parameters.runAsPublic }} - ${{ each parameter in parameters.sourceIndexParams }}: - ${{ parameter.key }}: ${{ parameter.value }} - -- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: - - template: ../job/publish-build-assets.yml - parameters: - continueOnError: ${{ parameters.continueOnError }} - dependsOn: - - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.publishBuildAssetsDependsOn }}: - - ${{ job.job }} - - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.jobs }}: - - ${{ job.job }} - - ${{ if eq(parameters.enableSourceBuild, true) }}: - - Source_Build_Complete +- template: /eng/common/core-templates/jobs/jobs.yml + parameters: + is1ESPipeline: false - runAsPublic: ${{ parameters.runAsPublic }} - publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} - publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} - enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/jobs/source-build.yml b/eng/common/templates/jobs/source-build.yml index a15b07eb5..818d4c326 100644 --- a/eng/common/templates/jobs/source-build.yml +++ b/eng/common/templates/jobs/source-build.yml @@ -1,46 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. A job is created for each platform, as - # well as an optional server job that completes when all platform jobs complete. - - # The name of the "join" job for all source-build platforms. If set to empty string, the job is - # not included. Existing repo pipelines can use this job depend on all source-build jobs - # completing without maintaining a separate list of every single job ID: just depend on this one - # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. - allCompletedJobId: '' - - # See /eng/common/templates/job/source-build.yml - jobNamePrefix: 'Source_Build' - - # This is the default platform provided by Arcade, intended for use by a managed-only repo. 
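The removed comments here and just below also document the multi-platform entry point in jobs/source-build.yml: a platforms list plus an optional allCompletedJobId join job (recommended name Source_Build_Complete). A hedged sketch; the second platform name is illustrative and its RID reuses the fedora.32-x64 example from the source-build comments earlier in this patch:

  # hypothetical multi-platform source-build invocation
  jobs:
  - template: /eng/common/templates/jobs/source-build.yml
    parameters:
      allCompletedJobId: Source_Build_Complete
      platforms:
      - name: 'Managed'
        container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
      - name: 'Fedora'                 # assumption: example platform name
        targetRID: 'fedora.32-x64'     # RID example taken from the source-build comments
        nonPortable: true              # a distro-specific container would normally be set here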
- defaultManagedPlatform: - name: 'Managed' - container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8' - - # Defines the platforms on which to run build jobs. One job is created for each platform, and the - # object in this array is sent to the job template as 'platform'. If no platforms are specified, - # one job runs on 'defaultManagedPlatform'. - platforms: [] - jobs: +- template: /eng/common/core-templates/jobs/source-build.yml + parameters: + is1ESPipeline: false -- ${{ if ne(parameters.allCompletedJobId, '') }}: - - job: ${{ parameters.allCompletedJobId }} - displayName: Source-Build Complete - pool: server - dependsOn: - - ${{ each platform in parameters.platforms }}: - - ${{ parameters.jobNamePrefix }}_${{ platform.name }} - - ${{ if eq(length(parameters.platforms), 0) }}: - - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} - -- ${{ each platform in parameters.platforms }}: - - template: /eng/common/templates/job/source-build.yml - parameters: - jobNamePrefix: ${{ parameters.jobNamePrefix }} - platform: ${{ platform }} - -- ${{ if eq(length(parameters.platforms), 0) }}: - - template: /eng/common/templates/job/source-build.yml - parameters: - jobNamePrefix: ${{ parameters.jobNamePrefix }} - platform: ${{ parameters.defaultManagedPlatform }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml index b9ede10bf..7fa105875 100644 --- a/eng/common/templates/post-build/common-variables.yml +++ b/eng/common/templates/post-build/common-variables.yml @@ -1,24 +1,8 @@ variables: - - group: Publish-Build-Assets +- template: /eng/common/core-templates/post-build/common-variables.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: false - # Whether the build is internal or not - - name: IsInternalBuild - value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} - - # Default Maestro++ API Endpoint and API Version - - name: MaestroApiEndPoint - value: "https://maestro.dot.net" - - name: MaestroApiAccessToken - value: $(MaestroAccessToken) - - name: MaestroApiVersion - value: "2020-02-20" - - - name: SourceLinkCLIVersion - value: 3.0.0 - - name: SymbolToolVersion - value: 1.0.1 - - name: BinlogToolVersion - value: 1.0.11 - - - name: runCodesignValidationInjection - value: false + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml index ee70e2b39..53ede714b 100644 --- a/eng/common/templates/post-build/post-build.yml +++ b/eng/common/templates/post-build/post-build.yml @@ -1,282 +1,8 @@ -parameters: - # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. - # Publishing V1 is no longer supported - # Publishing V2 is no longer supported - # Publishing V3 is the default - - name: publishingInfraVersion - displayName: Which version of publishing should be used to promote the build definition? 
- type: number - default: 3 - values: - - 3 - - - name: BARBuildId - displayName: BAR Build Id - type: number - default: 0 - - - name: PromoteToChannelIds - displayName: Channel to promote BARBuildId to - type: string - default: '' - - - name: enableSourceLinkValidation - displayName: Enable SourceLink validation - type: boolean - default: false - - - name: enableSigningValidation - displayName: Enable signing validation - type: boolean - default: true - - - name: enableSymbolValidation - displayName: Enable symbol validation - type: boolean - default: false - - - name: enableNugetValidation - displayName: Enable NuGet validation - type: boolean - default: true - - - name: publishInstallersAndChecksums - displayName: Publish installers and checksums - type: boolean - default: true - - - name: SDLValidationParameters - type: object - default: - enable: false - publishGdn: false - continueOnError: false - params: '' - artifactNames: '' - downloadArtifacts: true - - # These parameters let the user customize the call to sdk-task.ps1 for publishing - # symbols & general artifacts as well as for signing validation - - name: symbolPublishingAdditionalParameters - displayName: Symbol publishing additional parameters - type: string - default: '' - - - name: artifactsPublishingAdditionalParameters - displayName: Artifact publishing additional parameters - type: string - default: '' - - - name: signingValidationAdditionalParameters - displayName: Signing validation additional parameters - type: string - default: '' - - # Which stages should finish execution before post-build stages start - - name: validateDependsOn - type: object - default: - - build - - - name: publishDependsOn - type: object - default: - - Validate - - # Optional: Call asset publishing rather than running in a separate stage - - name: publishAssetsImmediately - type: boolean - default: false - stages: -- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: - - stage: Validate - dependsOn: ${{ parameters.validateDependsOn }} - displayName: Validate Build Assets - variables: - - template: common-variables.yml - - template: /eng/common/templates/variables/pool-providers.yml - jobs: - - job: - displayName: NuGet Validation - condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64 - - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 - arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ - -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ - - - job: - displayName: Signing Validation - condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64 - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - itemPattern: | - ** - !**/Microsoft.SourceBuild.Intermediate.*.nupkg - - # This is necessary whenever we want to publish/restore to an AzDO private feed - # Since sdk-task.ps1 tries to restore packages we need to do this authentication here - # otherwise it'll complain about accessing a private feed. - - task: NuGetAuthenticate@1 - displayName: 'Authenticate to AzDO Feeds' - - # Signing validation will optionally work with the buildmanifest file which is downloaded from - # Azure DevOps above. - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task SigningValidation -restore -msbuildEngine vs - /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' - /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' - ${{ parameters.signingValidationAdditionalParameters }} - - - template: ../steps/publish-logs.yml - parameters: - StageLabel: 'Validation' - JobLabel: 'Signing' - BinlogToolVersion: $(BinlogToolVersion) - - - job: - displayName: SourceLink Validation - condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64 - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Blob Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: BlobArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 - arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ - -ExtractPath $(Agent.BuildDirectory)/Extract/ - -GHRepoName $(Build.Repository.Name) - -GHCommit $(Build.SourceVersion) - -SourcelinkCliVersion $(SourceLinkCLIVersion) - continueOnError: true - - - template: /eng/common/templates/job/execute-sdl.yml - parameters: - enable: ${{ parameters.SDLValidationParameters.enable }} - publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }} - additionalParameters: ${{ parameters.SDLValidationParameters.params }} - continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }} - artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }} - downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }} - -- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: - - stage: publish_using_darc - ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: - dependsOn: ${{ parameters.publishDependsOn }} - ${{ else }}: - dependsOn: ${{ parameters.validateDependsOn }} - displayName: Publish using Darc - variables: - - template: common-variables.yml - - template: /eng/common/templates/variables/pool-providers.yml - jobs: - - job: - displayName: Publish Using Darc - timeoutInMinutes: 120 - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ else }}: - name: NetCore1ESPool-Publishing-Internal - demands: ImageOverride -equals windows.vs2019.amd64 - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: NuGetAuthenticate@1 +- template: /eng/common/core-templates/post-build/post-build.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: false - - task: PowerShell@2 - displayName: Publish Using Darc - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) - -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} - -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' - -MaestroToken '$(MaestroApiAccessToken)' - -WaitPublishingFinish true - -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' - -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml index 0c87f149a..a79fab5b4 100644 --- a/eng/common/templates/post-build/setup-maestro-vars.yml +++ b/eng/common/templates/post-build/setup-maestro-vars.yml @@ -1,70 +1,8 @@ -parameters: - BARBuildId: '' - PromoteToChannelIds: '' - steps: - - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: - - task: DownloadBuildArtifacts@0 - displayName: Download Release Configs - inputs: - buildType: current - artifactName: ReleaseConfigs - checkDownloadedFiles: true - - - task: PowerShell@2 - name: setReleaseVars - displayName: Set Release Configs Vars - inputs: - targetType: inline - pwsh: true - script: | - try { - if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { - $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt - - $BarId = $Content | Select -Index 0 - $Channels = $Content | Select -Index 1 - $IsStableBuild = $Content | Select -Index 2 - - $AzureDevOpsProject = $Env:System_TeamProject - $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId - $AzureDevOpsBuildId = $Env:Build_BuildId - } - else { - $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}" - - $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' - $apiHeaders.Add('Accept', 'application/json') - $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}") - - $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } - - $BarId = $Env:BARBuildId - $Channels = $Env:PromoteToMaestroChannels -split "," - $Channels = $Channels -join "][" - $Channels = "[$Channels]" - - $IsStableBuild = $buildInfo.stable - $AzureDevOpsProject = $buildInfo.azureDevOpsProject - $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId - $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId - } - - Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" - Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" - Write-Host "##vso[task.setvariable 
variable=IsStableBuild]$IsStableBuild" +- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: false - Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" - Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" - Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" - } - catch { - Write-Host $_ - Write-Host $_.Exception - Write-Host $_.ScriptStackTrace - exit 1 - } - env: - MAESTRO_API_TOKEN: $(MaestroApiAccessToken) - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/post-build/trigger-subscription.yml b/eng/common/templates/post-build/trigger-subscription.yml deleted file mode 100644 index da669030d..000000000 --- a/eng/common/templates/post-build/trigger-subscription.yml +++ /dev/null @@ -1,13 +0,0 @@ -parameters: - ChannelId: 0 - -steps: -- task: PowerShell@2 - displayName: Triggering subscriptions - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1 - arguments: -SourceRepo $(Build.Repository.Uri) - -ChannelId ${{ parameters.ChannelId }} - -MaestroApiAccessToken $(MaestroAccessToken) - -MaestroApiEndPoint $(MaestroApiEndPoint) - -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates/steps/add-build-to-channel.yml b/eng/common/templates/steps/add-build-to-channel.yml deleted file mode 100644 index f67a210d6..000000000 --- a/eng/common/templates/steps/add-build-to-channel.yml +++ /dev/null @@ -1,13 +0,0 @@ -parameters: - ChannelId: 0 - -steps: -- task: PowerShell@2 - displayName: Add Build to Channel - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1 - arguments: -BuildId $(BARBuildId) - -ChannelId ${{ parameters.ChannelId }} - -MaestroApiAccessToken $(MaestroApiAccessToken) - -MaestroApiEndPoint $(MaestroApiEndPoint) - -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates/steps/build-reason.yml b/eng/common/templates/steps/build-reason.yml deleted file mode 100644 index eba58109b..000000000 --- a/eng/common/templates/steps/build-reason.yml +++ /dev/null @@ -1,12 +0,0 @@ -# build-reason.yml -# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons -# to include steps (',' separated). 
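Even though post-build.yml is now a one-template shim, the parameter surface enumerated in the removed block (publishingInfraVersion, BARBuildId, PromoteToChannelIds, the enable*Validation switches, validateDependsOn) is still what repos pass in. A minimal sketch of appending the post-build stages to a repo pipeline, assuming the repo's build stage is literally named 'build':

  # hypothetical repo pipeline fragment; parameter names come from the removed defaults above
  stages:
  - template: /eng/common/templates/post-build/post-build.yml
    parameters:
      publishingInfraVersion: 3
      enableSigningValidation: true
      enableSourceLinkValidation: false
      validateDependsOn:
      - build                          # assumption: the repo's build stage name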
-parameters: - conditions: '' - steps: [] - -steps: - - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}: - - ${{ parameters.steps }} - - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}: - - ${{ parameters.steps }} diff --git a/eng/common/templates/steps/component-governance.yml b/eng/common/templates/steps/component-governance.yml index 0ecec47b0..c12a5f8d2 100644 --- a/eng/common/templates/steps/component-governance.yml +++ b/eng/common/templates/steps/component-governance.yml @@ -1,13 +1,7 @@ -parameters: - disableComponentGovernance: false - componentGovernanceIgnoreDirectories: '' - steps: -- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: - - script: "echo ##vso[task.setvariable variable=skipComponentGovernanceDetection]true" - displayName: Set skipComponentGovernanceDetection variable -- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: - - task: ComponentGovernanceComponentDetection@0 - continueOnError: true - inputs: - ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} \ No newline at end of file +- template: /eng/common/core-templates/steps/component-governance.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/enable-internal-runtimes.yml b/eng/common/templates/steps/enable-internal-runtimes.yml new file mode 100644 index 000000000..b21a8038c --- /dev/null +++ b/eng/common/templates/steps/enable-internal-runtimes.yml @@ -0,0 +1,10 @@ +# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64' +# variable with the base64-encoded SAS token, by default + +steps: +- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/enable-internal-sources.yml b/eng/common/templates/steps/enable-internal-sources.yml new file mode 100644 index 000000000..5f87e9abb --- /dev/null +++ b/eng/common/templates/steps/enable-internal-sources.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/enable-internal-sources.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/steps/execute-codeql.yml b/eng/common/templates/steps/execute-codeql.yml deleted file mode 100644 index 3930b1630..000000000 --- a/eng/common/templates/steps/execute-codeql.yml +++ /dev/null @@ -1,32 +0,0 @@ -parameters: - # Language that should be analyzed. Defaults to csharp - language: csharp - # Build Commands - buildCommands: '' - overrideParameters: '' # Optional: to override values for parameters. - additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")' - # Optional: if specified, restore and use this version of Guardian instead of the default. - overrideGuardianVersion: '' - # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth - # diagnosis of problems with specific tool configurations. - publishGuardianDirectoryToPipeline: false - # The script to run to execute all SDL tools. 
Use this if you want to use a script to define SDL - # parameters rather than relying on YAML. It may be better to use a local script, because you can - # reproduce results locally without piecing together a command based on the YAML. - executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1' - # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named - # 'continueOnError', the parameter value is not correctly picked up. - # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter - # optional: determines whether to continue the build if the step errors; - sdlContinueOnError: false - -steps: -- template: /eng/common/templates/steps/execute-sdl.yml - parameters: - overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} - executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} - overrideParameters: ${{ parameters.overrideParameters }} - additionalParameters: '${{ parameters.additionalParameters }} - -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")' - publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} - sdlContinueOnError: ${{ parameters.sdlContinueOnError }} \ No newline at end of file diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml deleted file mode 100644 index 07426fde0..000000000 --- a/eng/common/templates/steps/execute-sdl.yml +++ /dev/null @@ -1,88 +0,0 @@ -parameters: - overrideGuardianVersion: '' - executeAllSdlToolsScript: '' - overrideParameters: '' - additionalParameters: '' - publishGuardianDirectoryToPipeline: false - sdlContinueOnError: false - condition: '' - -steps: -- task: NuGetAuthenticate@1 - inputs: - nuGetServiceConnections: GuardianConnect - -- task: NuGetToolInstaller@1 - displayName: 'Install NuGet.exe' - -- ${{ if ne(parameters.overrideGuardianVersion, '') }}: - - pwsh: | - Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl - . .\sdl.ps1 - $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }} - Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" - displayName: Install Guardian (Overridden) - -- ${{ if eq(parameters.overrideGuardianVersion, '') }}: - - pwsh: | - Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl - . 
.\sdl.ps1 - $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts - Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" - displayName: Install Guardian - -- ${{ if ne(parameters.overrideParameters, '') }}: - - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }} - displayName: Execute SDL (Overridden) - continueOnError: ${{ parameters.sdlContinueOnError }} - condition: ${{ parameters.condition }} - -- ${{ if eq(parameters.overrideParameters, '') }}: - - powershell: ${{ parameters.executeAllSdlToolsScript }} - -GuardianCliLocation $(GuardianCliLocation) - -NugetPackageDirectory $(Build.SourcesDirectory)\.packages - -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw) - ${{ parameters.additionalParameters }} - displayName: Execute SDL - continueOnError: ${{ parameters.sdlContinueOnError }} - condition: ${{ parameters.condition }} - -- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}: - # We want to publish the Guardian results and configuration for easy diagnosis. However, the - # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default - # tooling files. Some of these files are large and aren't useful during an investigation, so - # exclude them by simply deleting them before publishing. (As of writing, there is no documented - # way to selectively exclude a dir from the pipeline artifact publish task.) - - task: DeleteFiles@1 - displayName: Delete Guardian dependencies to avoid uploading - inputs: - SourceFolder: $(Agent.BuildDirectory)/.gdn - Contents: | - c - i - condition: succeededOrFailed() - - - publish: $(Agent.BuildDirectory)/.gdn - artifact: GuardianConfiguration - displayName: Publish GuardianConfiguration - condition: succeededOrFailed() - - # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration - # with the "SARIF SAST Scans Tab" Azure DevOps extension - - task: CopyFiles@2 - displayName: Copy SARIF files - inputs: - flattenFolders: true - sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/ - contents: '**/*.sarif' - targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs - condition: succeededOrFailed() - - # Use PublishBuildArtifacts because the SARIF extension only checks this case - # see microsoft/sarif-azuredevops-extension#4 - - task: PublishBuildArtifacts@1 - displayName: Publish SARIF files to CodeAnalysisLogs container - inputs: - pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs - artifactName: CodeAnalysisLogs - condition: succeededOrFailed() \ No newline at end of file diff --git a/eng/common/templates/steps/generate-sbom.yml b/eng/common/templates/steps/generate-sbom.yml index a06373f38..26dc00a2e 100644 --- a/eng/common/templates/steps/generate-sbom.yml +++ b/eng/common/templates/steps/generate-sbom.yml @@ -1,48 +1,7 @@ -# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. -# PackageName - The name of the package this SBOM represents. -# PackageVersion - The version of the package this SBOM represents. -# ManifestDirPath - The path of the directory where the generated manifest files will be placed -# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. 
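The deleted header comment above spells out the SBOM step's inputs; the shimmed template keeps the same parameter names. A sketch of invoking it directly, where the package version and ignore path are illustrative and the other values mirror the documented defaults:

  # hypothetical direct use of the SBOM step template
  steps:
  - template: /eng/common/templates/steps/generate-sbom.yml
    parameters:
      PackageVersion: 9.0.0                                        # assumption: example version
      PackageName: '.NET'                                          # documented default
      BuildDropPath: '$(Build.SourcesDirectory)/artifacts'         # documented default
      IgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/obj' # assumption: example path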
- -parameters: - PackageVersion: 7.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' - PackageName: '.NET' - ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom - IgnoreDirectories: '' - sbomContinueOnError: true - steps: -- task: PowerShell@2 - displayName: Prep for SBOM generation in (Non-linux) - condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) - inputs: - filePath: ./eng/common/generate-sbom-prep.ps1 - arguments: ${{parameters.manifestDirPath}} - -# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 -- script: | - chmod +x ./eng/common/generate-sbom-prep.sh - ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} - displayName: Prep for SBOM generation in (Linux) - condition: eq(variables['Agent.Os'], 'Linux') - continueOnError: ${{ parameters.sbomContinueOnError }} - -- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 - displayName: 'Generate SBOM manifest' - continueOnError: ${{ parameters.sbomContinueOnError }} - inputs: - PackageName: ${{ parameters.packageName }} - BuildDropPath: ${{ parameters.buildDropPath }} - PackageVersion: ${{ parameters.packageVersion }} - ManifestDirPath: ${{ parameters.manifestDirPath }} - ${{ if ne(parameters.IgnoreDirectories, '') }}: - AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' - -- task: PublishPipelineArtifact@1 - displayName: Publish SBOM manifest - continueOnError: ${{parameters.sbomContinueOnError}} - inputs: - targetPath: '${{parameters.manifestDirPath}}' - artifactName: $(ARTIFACT_NAME) +- template: /eng/common/core-templates/steps/generate-sbom.yml + parameters: + is1ESPipeline: false + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/get-delegation-sas.yml b/eng/common/templates/steps/get-delegation-sas.yml new file mode 100644 index 000000000..83760c979 --- /dev/null +++ b/eng/common/templates/steps/get-delegation-sas.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/get-delegation-sas.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/get-federated-access-token.yml b/eng/common/templates/steps/get-federated-access-token.yml new file mode 100644 index 000000000..31e151d9d --- /dev/null +++ b/eng/common/templates/steps/get-federated-access-token.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/get-federated-access-token.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/steps/publish-build-artifacts.yml b/eng/common/templates/steps/publish-build-artifacts.yml new file mode 100644 index 000000000..6428a98df --- /dev/null +++ b/eng/common/templates/steps/publish-build-artifacts.yml @@ -0,0 +1,40 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false + +- name: displayName + type: string + default: 'Publish to Build Artifact' + +- name: condition + type: string + default: succeeded() + +- name: artifactName + type: string + +- name: pathToPublish + type: string + +- name: continueOnError + type: boolean + default: false + +- name: publishLocation + type: string + default: 'Container' + +steps: +- ${{ if eq(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates cannot 
be referenced from a 1ES managed template': error +- task: PublishBuildArtifacts@1 + displayName: ${{ parameters.displayName }} + condition: ${{ parameters.condition }} + ${{ if parameters.continueOnError }}: + continueOnError: ${{ parameters.continueOnError }} + inputs: + PublishLocation: ${{ parameters.publishLocation }} + PathtoPublish: ${{ parameters.pathToPublish }} + ${{ if parameters.artifactName }}: + ArtifactName: ${{ parameters.artifactName }} \ No newline at end of file diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml index 80861297d..4ea86bd88 100644 --- a/eng/common/templates/steps/publish-logs.yml +++ b/eng/common/templates/steps/publish-logs.yml @@ -1,49 +1,7 @@ -parameters: - StageLabel: '' - JobLabel: '' - CustomSensitiveDataList: '' - # A default - in case value from eng/common/templates/post-build/common-variables.yml is not passed - BinlogToolVersion: '1.0.11' - steps: -- task: Powershell@2 - displayName: Prepare Binlogs to Upload - inputs: - targetType: inline - script: | - New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - continueOnError: true - condition: always() - -- task: PowerShell@2 - displayName: Redact Logs - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1 - # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml - # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' - # If the file exists - sensitive data for redaction will be sourced from it - # (single entry per line, lines starting with '# ' are considered comments and skipped) - arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs' - -BinlogToolVersion ${{parameters.BinlogToolVersion}} - -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' - '$(publishing-dnceng-devdiv-code-r-build-re)' - '$(MaestroAccessToken)' - '$(dn-bot-all-orgs-artifact-feeds-rw)' - '$(akams-client-id)' - '$(akams-client-secret)' - '$(microsoft-symbol-server-pat)' - '$(symweb-symbol-server-pat)' - '$(dn-bot-all-orgs-build-rw-code-rw)' - ${{parameters.CustomSensitiveDataList}} - continueOnError: true - condition: always() - -- task: PublishBuildArtifacts@1 - displayName: Publish Logs - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs' - PublishLocation: Container - ArtifactName: PostBuildLogs - continueOnError: true - condition: always() +- template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/publish-pipeline-artifacts.yml b/eng/common/templates/steps/publish-pipeline-artifacts.yml new file mode 100644 index 000000000..5dd698b21 --- /dev/null +++ b/eng/common/templates/steps/publish-pipeline-artifacts.yml @@ -0,0 +1,34 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false + +- name: args + type: object + default: {} + +steps: +- ${{ if eq(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates cannot be referenced from a 1ES managed template': error +- task: PublishPipelineArtifact@1 + displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }} + 
${{ if parameters.args.condition }}: + condition: ${{ parameters.args.condition }} + ${{ else }}: + condition: succeeded() + ${{ if parameters.args.continueOnError }}: + continueOnError: ${{ parameters.args.continueOnError }} + inputs: + targetPath: ${{ parameters.args.targetPath }} + ${{ if parameters.args.artifactName }}: + artifactName: ${{ parameters.args.artifactName }} + ${{ if parameters.args.publishLocation }}: + publishLocation: ${{ parameters.args.publishLocation }} + ${{ if parameters.args.fileSharePath }}: + fileSharePath: ${{ parameters.args.fileSharePath }} + ${{ if parameters.args.Parallel }}: + parallel: ${{ parameters.args.Parallel }} + ${{ if parameters.args.parallelCount }}: + parallelCount: ${{ parameters.args.parallelCount }} + ${{ if parameters.args.properties }}: + properties: ${{ parameters.args.properties }} \ No newline at end of file diff --git a/eng/common/templates/steps/retain-build.yml b/eng/common/templates/steps/retain-build.yml index 83d97a26a..8e841ace3 100644 --- a/eng/common/templates/steps/retain-build.yml +++ b/eng/common/templates/steps/retain-build.yml @@ -1,28 +1,7 @@ -parameters: - # Optional azure devops PAT with build execute permissions for the build's organization, - # only needed if the build that should be retained ran on a different organization than - # the pipeline where this template is executing from - Token: '' - # Optional BuildId to retain, defaults to the current running build - BuildId: '' - # Azure devops Organization URI for the build in the https://dev.azure.com/ format. - # Defaults to the organization the current pipeline is running on - AzdoOrgUri: '$(System.CollectionUri)' - # Azure devops project for the build. Defaults to the project the current pipeline is running on - AzdoProject: '$(System.TeamProject)' - steps: - - task: powershell@2 - inputs: - targetType: 'filePath' - filePath: eng/common/retain-build.ps1 - pwsh: true - arguments: > - -AzdoOrgUri: ${{parameters.AzdoOrgUri}} - -AzdoProject ${{parameters.AzdoProject}} - -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }} - -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}} - displayName: Enable permanent build retention - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - BUILD_ID: $(Build.BuildId) \ No newline at end of file +- template: /eng/common/core-templates/steps/retain-build.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/run-on-unix.yml b/eng/common/templates/steps/run-on-unix.yml deleted file mode 100644 index e1733814f..000000000 --- a/eng/common/templates/steps/run-on-unix.yml +++ /dev/null @@ -1,7 +0,0 @@ -parameters: - agentOs: '' - steps: [] - -steps: -- ${{ if ne(parameters.agentOs, 'Windows_NT') }}: - - ${{ parameters.steps }} diff --git a/eng/common/templates/steps/run-on-windows.yml b/eng/common/templates/steps/run-on-windows.yml deleted file mode 100644 index 73e7e9c27..000000000 --- a/eng/common/templates/steps/run-on-windows.yml +++ /dev/null @@ -1,7 +0,0 @@ -parameters: - agentOs: '' - steps: [] - -steps: -- ${{ if eq(parameters.agentOs, 'Windows_NT') }}: - - ${{ parameters.steps }} diff --git a/eng/common/templates/steps/run-script-ifequalelse.yml b/eng/common/templates/steps/run-script-ifequalelse.yml deleted file mode 100644 index 3d1242f55..000000000 --- a/eng/common/templates/steps/run-script-ifequalelse.yml +++ /dev/null @@ -1,33 +0,0 @@ -parameters: - # if parameter1 equals parameter 2, run 
'ifScript' command, else run 'elsescript' command - parameter1: '' - parameter2: '' - ifScript: '' - elseScript: '' - - # name of script step - name: Script - - # display name of script step - displayName: If-Equal-Else Script - - # environment - env: {} - - # conditional expression for step execution - condition: '' - -steps: -- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}: - - script: ${{ parameters.ifScript }} - name: ${{ parameters.name }} - displayName: ${{ parameters.displayName }} - env: ${{ parameters.env }} - condition: ${{ parameters.condition }} - -- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}: - - script: ${{ parameters.elseScript }} - name: ${{ parameters.name }} - displayName: ${{ parameters.displayName }} - env: ${{ parameters.env }} - condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml index 68fa739c4..39f99fc27 100644 --- a/eng/common/templates/steps/send-to-helix.yml +++ b/eng/common/templates/steps/send-to-helix.yml @@ -1,93 +1,7 @@ -# Please remember to update the documentation if you make changes to these parameters! -parameters: - HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ - HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' - HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number - HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues - HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group - HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY - HelixProjectArguments: '' # optional -- arguments passed to the build command - HelixConfiguration: '' # optional -- additional property attached to a job - HelixPreCommands: '' # optional -- commands to run before Helix work item execution - HelixPostCommands: '' # optional -- commands to run after Helix work item execution - WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects - WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects - WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 
00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects - CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload - XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true - XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects - XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects - XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner - XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects - IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion - DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json - DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json - WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." - IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set - HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) - Creator: '' # optional -- if the build is external, use this to specify who is sending the job - DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO - condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() - continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false - steps: - - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' - displayName: ${{ parameters.DisplayNamePrefix }} (Windows) - env: - BuildConfig: $(_BuildConfig) - HelixSource: ${{ parameters.HelixSource }} - HelixType: ${{ parameters.HelixType }} - HelixBuild: ${{ parameters.HelixBuild }} - HelixConfiguration: ${{ parameters.HelixConfiguration }} - HelixTargetQueues: ${{ parameters.HelixTargetQueues }} - HelixAccessToken: ${{ parameters.HelixAccessToken }} - HelixPreCommands: ${{ parameters.HelixPreCommands }} - HelixPostCommands: ${{ parameters.HelixPostCommands }} - WorkItemDirectory: ${{ parameters.WorkItemDirectory }} - WorkItemCommand: ${{ parameters.WorkItemCommand }} - WorkItemTimeout: ${{ parameters.WorkItemTimeout }} - CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} - XUnitProjects: ${{ parameters.XUnitProjects }} - XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} - 
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} - XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} - XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} - IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} - DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} - DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} - Creator: ${{ parameters.Creator }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog - displayName: ${{ parameters.DisplayNamePrefix }} (Unix) - env: - BuildConfig: $(_BuildConfig) - HelixSource: ${{ parameters.HelixSource }} - HelixType: ${{ parameters.HelixType }} - HelixBuild: ${{ parameters.HelixBuild }} - HelixConfiguration: ${{ parameters.HelixConfiguration }} - HelixTargetQueues: ${{ parameters.HelixTargetQueues }} - HelixAccessToken: ${{ parameters.HelixAccessToken }} - HelixPreCommands: ${{ parameters.HelixPreCommands }} - HelixPostCommands: ${{ parameters.HelixPostCommands }} - WorkItemDirectory: ${{ parameters.WorkItemDirectory }} - WorkItemCommand: ${{ parameters.WorkItemCommand }} - WorkItemTimeout: ${{ parameters.WorkItemTimeout }} - CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} - XUnitProjects: ${{ parameters.XUnitProjects }} - XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} - XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} - XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} - XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} - IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} - DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} - DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} - Creator: ${{ parameters.Creator }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} +- template: /eng/common/core-templates/steps/send-to-helix.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml index 32738aa93..23c1d6f4e 100644 --- a/eng/common/templates/steps/source-build.yml +++ b/eng/common/templates/steps/source-build.yml @@ -1,131 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. - - # This is a 'steps' template, and is intended for advanced scenarios where the existing build - # infra has a careful build methodology that must be followed. For example, a repo - # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline - # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to - # GitHub. 
Using this steps template leaves room for that infra to be included. - - # Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml' - # for details. The entire object is described in the 'job' template for simplicity, even though - # the usage of the properties on this object is split between the 'job' and 'steps' templates. - platform: {} - steps: -# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.) -- script: | - set -x - df -h - - # If building on the internal project, the artifact feeds variable may be available (usually only if needed) - # In that case, call the feed setup script to add internal feeds corresponding to public ones. - # In addition, add an msbuild argument to copy the WIP from the repo to the target build location. - # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those - # changes. - internalRestoreArgs= - if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then - # Temporarily work around https://github.com/dotnet/arcade/issues/7709 - chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh - $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw) - internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' - - # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. - # This only works if there is a username/email configured, which won't be the case in most CI runs. - git config --get user.email - if [ $? -ne 0 ]; then - git config user.email dn-bot@microsoft.com - git config user.name dn-bot - fi - fi - - # If building on the internal project, the internal storage variable may be available (usually only if needed) - # In that case, add variables to allow the download of internal runtimes if the specified versions are not found - # in the default public locations. - internalRuntimeDownloadArgs= - if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then - internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' - fi - - buildConfig=Release - # Check if AzDO substitutes in a build config from a variable, and use it if so. 
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then - buildConfig='$(_BuildConfig)' - fi - - officialBuildArgs= - if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then - officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' - fi - - targetRidArgs= - if [ '${{ parameters.platform.targetRID }}' != '' ]; then - targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' - fi - - runtimeOsArgs= - if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then - runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' - fi - - baseOsArgs= - if [ '${{ parameters.platform.baseOS }}' != '' ]; then - baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' - fi - - publishArgs= - if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then - publishArgs='--publish' - fi - - assetManifestFileName=SourceBuild_RidSpecific.xml - if [ '${{ parameters.platform.name }}' != '' ]; then - assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml - fi - - ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ - --configuration $buildConfig \ - --restore --build --pack $publishArgs -bl \ - $officialBuildArgs \ - $internalRuntimeDownloadArgs \ - $internalRestoreArgs \ - $targetRidArgs \ - $runtimeOsArgs \ - $baseOsArgs \ - /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ - /p:ArcadeBuildFromSource=true \ - /p:DotNetBuildSourceOnly=true \ - /p:DotNetBuildRepo=true \ - /p:AssetManifestFileName=$assetManifestFileName - displayName: Build - -# Upload build logs for diagnosis. -- task: CopyFiles@2 - displayName: Prepare BuildLogs staging directory - inputs: - SourceFolder: '$(Build.SourcesDirectory)' - Contents: | - **/*.log - **/*.binlog - artifacts/sb/prebuilt-report/** - TargetFolder: '$(Build.StagingDirectory)/BuildLogs' - CleanTargetFolder: true - continueOnError: true - condition: succeededOrFailed() - -- task: PublishPipelineArtifact@1 - displayName: Publish BuildLogs - inputs: - targetPath: '$(Build.StagingDirectory)/BuildLogs' - artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) - continueOnError: true - condition: succeededOrFailed() +- template: /eng/common/core-templates/steps/source-build.yml + parameters: + is1ESPipeline: false -# Manually inject component detection so that we can ignore the source build upstream cache, which contains -# a nupkg cache of input packages (a local feed). 
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir' -# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets -- task: ComponentGovernanceComponentDetection@0 - displayName: Component Detection (Exclude upstream cache) - inputs: - ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/source-index-stage1-publish.yml b/eng/common/templates/steps/source-index-stage1-publish.yml new file mode 100644 index 000000000..182cec33a --- /dev/null +++ b/eng/common/templates/steps/source-index-stage1-publish.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/telemetry-end.yml b/eng/common/templates/steps/telemetry-end.yml deleted file mode 100644 index fadc04ca1..000000000 --- a/eng/common/templates/steps/telemetry-end.yml +++ /dev/null @@ -1,102 +0,0 @@ -parameters: - maxRetries: 5 - retryDelay: 10 # in seconds - -steps: -- bash: | - if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then - errorCount=0 - else - errorCount=1 - fi - warningCount=0 - - curlStatus=1 - retryCount=0 - # retry loop to harden against spotty telemetry connections - # we don't retry successes and 4xx client errors - until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]] - do - if [ $retryCount -gt 0 ]; then - echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..." - sleep $RetryDelay - fi - - # create a temporary file for curl output - res=`mktemp` - - curlResult=` - curl --verbose --output $res --write-out "%{http_code}"\ - -H 'Content-Type: application/json' \ - -H "X-Helix-Job-Token: $Helix_JobToken" \ - -H 'Content-Length: 0' \ - -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \ - --data-urlencode "errorCount=$errorCount" \ - --data-urlencode "warningCount=$warningCount"` - curlStatus=$? 
- - if [ $curlStatus -eq 0 ]; then - if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then - curlStatus=$curlResult - fi - fi - - let retryCount++ - done - - if [ $curlStatus -ne 0 ]; then - echo "Failed to Send Build Finish information after $retryCount retries" - vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus" - echo "##$vstsLogOutput" - exit 1 - fi - displayName: Send Unix Build End Telemetry - env: - # defined via VSTS variables in start-job.sh - Helix_JobToken: $(Helix_JobToken) - Helix_WorkItemId: $(Helix_WorkItemId) - MaxRetries: ${{ parameters.maxRetries }} - RetryDelay: ${{ parameters.retryDelay }} - condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT')) -- powershell: | - if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) { - $ErrorCount = 0 - } else { - $ErrorCount = 1 - } - $WarningCount = 0 - - # Basic retry loop to harden against server flakiness - $retryCount = 0 - while ($retryCount -lt $env:MaxRetries) { - try { - Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" ` - -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken } - break - } - catch { - $statusCode = $_.Exception.Response.StatusCode.value__ - if ($statusCode -ge 400 -and $statusCode -le 499) { - Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)" - Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message - exit 1 - } - Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..." - $retryCount++ - sleep $env:RetryDelay - continue - } - } - - if ($retryCount -ge $env:MaxRetries) { - Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries." 
- exit 1 - } - displayName: Send Windows Build End Telemetry - env: - # defined via VSTS variables in start-job.ps1 - Helix_JobToken: $(Helix_JobToken) - Helix_WorkItemId: $(Helix_WorkItemId) - MaxRetries: ${{ parameters.maxRetries }} - RetryDelay: ${{ parameters.retryDelay }} - condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT')) diff --git a/eng/common/templates/steps/telemetry-start.yml b/eng/common/templates/steps/telemetry-start.yml deleted file mode 100644 index 32c01ef0b..000000000 --- a/eng/common/templates/steps/telemetry-start.yml +++ /dev/null @@ -1,241 +0,0 @@ -parameters: - helixSource: 'undefined_defaulted_in_telemetry.yml' - helixType: 'undefined_defaulted_in_telemetry.yml' - buildConfig: '' - runAsPublic: false - maxRetries: 5 - retryDelay: 10 # in seconds - -steps: -- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}: - - task: AzureKeyVault@1 - inputs: - azureSubscription: 'HelixProd_KeyVault' - KeyVaultName: HelixProdKV - SecretsFilter: 'HelixApiAccessToken' - condition: always() -- bash: | - # create a temporary file - jobInfo=`mktemp` - - # write job info content to temporary file - cat > $jobInfo < Date: Tue, 26 Nov 2024 13:25:39 +0000 Subject: [PATCH 02/15] Update dependencies from https://github.com/dotnet/arcade build 20241125.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24575.1 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index bfc068acc..2f7aa2628 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - 7d955f9f470465e144c76d47fd2596a0e4c02a21 + 2a3bf4e3a4c473135d058adcd7193a5a4bcd38a7 - + https://github.com/dotnet/arcade - 7d955f9f470465e144c76d47fd2596a0e4c02a21 + 2a3bf4e3a4c473135d058adcd7193a5a4bcd38a7 diff --git a/global.json b/global.json index ab46e994f..92a0f1c47 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24572.3", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24572.3" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24575.1", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24575.1" } } From 0acf220b0dd549a5649fc8195fc2eb1da4843ec5 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Wed, 27 Nov 2024 13:25:00 +0000 Subject: [PATCH 03/15] Update dependencies from https://github.com/dotnet/arcade build 20241126.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24576.1 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 2f7aa2628..80f5e4ed9 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - 2a3bf4e3a4c473135d058adcd7193a5a4bcd38a7 + 9ad0880a9f8450f4ac4e097cfe830e401ea3e22c - + https://github.com/dotnet/arcade - 2a3bf4e3a4c473135d058adcd7193a5a4bcd38a7 + 9ad0880a9f8450f4ac4e097cfe830e401ea3e22c diff --git a/global.json b/global.json index 92a0f1c47..455e88278 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24575.1", - 
"Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24575.1" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24576.1", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24576.1" } } From 61fbae9ef5f36dc60e605997f4a41859e44aa312 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Thu, 28 Nov 2024 13:23:07 +0000 Subject: [PATCH 04/15] Update dependencies from https://github.com/dotnet/arcade build 20241127.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24577.1 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 80f5e4ed9..bbaeed942 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - 9ad0880a9f8450f4ac4e097cfe830e401ea3e22c + d0f89c635d780e183a97ad86af4f3c8d7e95977f - + https://github.com/dotnet/arcade - 9ad0880a9f8450f4ac4e097cfe830e401ea3e22c + d0f89c635d780e183a97ad86af4f3c8d7e95977f diff --git a/global.json b/global.json index 455e88278..da8d2ad73 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24576.1", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24576.1" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24577.1", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24577.1" } } From b354ae9f66b850e742521f23344067e78f72c2ba Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Fri, 29 Nov 2024 13:24:14 +0000 Subject: [PATCH 05/15] Update dependencies from https://github.com/dotnet/arcade build 20241128.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24578.2 --- eng/Version.Details.xml | 8 ++++---- eng/common/native/install-dependencies.sh | 6 +----- global.json | 4 ++-- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index bbaeed942..590c5a770 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - d0f89c635d780e183a97ad86af4f3c8d7e95977f + e8de3415124309210e4cbd0105e4a9da8dc01696 - + https://github.com/dotnet/arcade - d0f89c635d780e183a97ad86af4f3c8d7e95977f + e8de3415124309210e4cbd0105e4a9da8dc01696 diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh index dc396a955..3eef7409f 100644 --- a/eng/common/native/install-dependencies.sh +++ b/eng/common/native/install-dependencies.sh @@ -44,15 +44,11 @@ case "$os" in export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 # Skip brew update for now, see https://github.com/actions/setup-python/issues/577 # brew update --preinstall - - # Temporarily uninstall pkg-config@0.29.2 to work around https://github.com/actions/runner-images/issues/10984 - brew uninstall --ignore-dependencies --force pkg-config@0.29.2 - brew bundle --no-upgrade --no-lock --file=- < Date: Thu, 5 Dec 2024 13:23:51 +0000 Subject: [PATCH 06/15] Update dependencies from https://github.com/dotnet/arcade build 20241204.4 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24604.4 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 590c5a770..f1ec9fb9e 100644 --- a/eng/Version.Details.xml 
+++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - e8de3415124309210e4cbd0105e4a9da8dc01696 + 45d845e04c05fbe5da9838c454bbc3af1df6be81 - + https://github.com/dotnet/arcade - e8de3415124309210e4cbd0105e4a9da8dc01696 + 45d845e04c05fbe5da9838c454bbc3af1df6be81 diff --git a/global.json b/global.json index 2ec732660..734dc08d4 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24578.2", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24578.2" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24604.4", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24604.4" } } From b414af05eb7d4701f5d1cad2990f7f84eb30395b Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Fri, 6 Dec 2024 13:22:12 +0000 Subject: [PATCH 07/15] Update dependencies from https://github.com/dotnet/arcade build 20241205.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24605.1 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index f1ec9fb9e..ffee77162 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - 45d845e04c05fbe5da9838c454bbc3af1df6be81 + 110749ff6e8a43fcd7c8f0bd74c5fcb0da3562ed - + https://github.com/dotnet/arcade - 45d845e04c05fbe5da9838c454bbc3af1df6be81 + 110749ff6e8a43fcd7c8f0bd74c5fcb0da3562ed diff --git a/global.json b/global.json index 734dc08d4..80a662a13 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24604.4", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24604.4" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24605.1", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24605.1" } } From c0ccf8b34c81edbe042b46feb13a23fe861e9e2d Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Sat, 7 Dec 2024 13:23:46 +0000 Subject: [PATCH 08/15] Update dependencies from https://github.com/dotnet/arcade build 20241206.6 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24606.6 --- eng/Version.Details.xml | 8 ++++---- eng/common/core-templates/steps/source-build.yml | 2 +- eng/common/cross/build-rootfs.sh | 8 ++++---- global.json | 4 ++-- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index ffee77162..05d80d994 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - 110749ff6e8a43fcd7c8f0bd74c5fcb0da3562ed + 61b8f746424762d2e3173ebfaab19346224d591c - + https://github.com/dotnet/arcade - 110749ff6e8a43fcd7c8f0bd74c5fcb0da3562ed + 61b8f746424762d2e3173ebfaab19346224d591c diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml index 4da05afe0..f9ba1625c 100644 --- a/eng/common/core-templates/steps/source-build.yml +++ b/eng/common/core-templates/steps/source-build.yml @@ -78,7 +78,7 @@ steps: portableBuildArgs= if [ '${{ parameters.platform.portableBuild }}' != '' ]; then - portableBuildArgs='/p:PortabelBuild=${{ parameters.platform.portableBuild }}' + portableBuildArgs='/p:PortableBuild=${{ parameters.platform.portableBuild }}' fi ${{ 
coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index 20ae8c286..096bfe51f 100644 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -73,8 +73,8 @@ __AlpinePackages+=" krb5-dev" __AlpinePackages+=" openssl-dev" __AlpinePackages+=" zlib-dev" -__FreeBSDBase="13.3-RELEASE" -__FreeBSDPkg="1.17.0" +__FreeBSDBase="13.4-RELEASE" +__FreeBSDPkg="1.21.3" __FreeBSDABI="13" __FreeBSDPackages="libunwind" __FreeBSDPackages+=" icu" @@ -371,7 +371,7 @@ while :; do ;; freebsd14) __CodeName=freebsd - __FreeBSDBase="14.0-RELEASE" + __FreeBSDBase="14.2-RELEASE" __FreeBSDABI="14" __SkipUnmount=1 ;; @@ -574,7 +574,7 @@ elif [[ "$__CodeName" == "freebsd" ]]; then curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version fi echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf - echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf + echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf mkdir -p "$__RootfsDir"/tmp # get and build package manager if [[ "$__hasWget" == 1 ]]; then diff --git a/global.json b/global.json index 80a662a13..fce754fc3 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24605.1", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24605.1" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24606.6", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24606.6" } } From 3e0868de02eb34ce79b1107125a5223c00a3d158 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Tue, 10 Dec 2024 13:24:16 +0000 Subject: [PATCH 09/15] Update dependencies from https://github.com/dotnet/arcade build 20241210.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24610.1 --- eng/Version.Details.xml | 8 +- eng/common/cross/toolchain.cmake | 138 ++++++++++++------------------- global.json | 4 +- 3 files changed, 61 insertions(+), 89 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 05d80d994..d76af212a 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - 61b8f746424762d2e3173ebfaab19346224d591c + bdea4c2fa4851874b372af1c90d4bd2ec51ad499 - + https://github.com/dotnet/arcade - 61b8f746424762d2e3173ebfaab19346224d591c + bdea4c2fa4851874b372af1c90d4bd2ec51ad499 diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake index 9a4e285a5..deac538e6 100644 --- a/eng/common/cross/toolchain.cmake +++ b/eng/common/cross/toolchain.cmake @@ -40,7 +40,7 @@ if(TARGET_ARCH_NAME STREQUAL "arm") set(TOOLCHAIN "arm-linux-gnueabihf") endif() if(TIZEN) - set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf/9.2.0") + 
set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf") endif() elseif(TARGET_ARCH_NAME STREQUAL "arm64") set(CMAKE_SYSTEM_PROCESSOR aarch64) @@ -49,7 +49,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm64") elseif(LINUX) set(TOOLCHAIN "aarch64-linux-gnu") if(TIZEN) - set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0") + set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu") endif() elseif(FREEBSD) set(triple "aarch64-unknown-freebsd12") @@ -58,7 +58,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "armel") set(CMAKE_SYSTEM_PROCESSOR armv7l) set(TOOLCHAIN "arm-linux-gnueabi") if(TIZEN) - set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0") + set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi") endif() elseif(TARGET_ARCH_NAME STREQUAL "armv6") set(CMAKE_SYSTEM_PROCESSOR armv6l) @@ -81,7 +81,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "riscv64") else() set(TOOLCHAIN "riscv64-linux-gnu") if(TIZEN) - set(TIZEN_TOOLCHAIN "riscv64-tizen-linux-gnu/13.1.0") + set(TIZEN_TOOLCHAIN "riscv64-tizen-linux-gnu") endif() endif() elseif(TARGET_ARCH_NAME STREQUAL "s390x") @@ -98,7 +98,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "x64") elseif(LINUX) set(TOOLCHAIN "x86_64-linux-gnu") if(TIZEN) - set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu/9.2.0") + set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu") endif() elseif(FREEBSD) set(triple "x86_64-unknown-freebsd12") @@ -115,7 +115,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86") set(TOOLCHAIN "i686-linux-gnu") endif() if(TIZEN) - set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu/9.2.0") + set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu") endif() else() message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, ppc64le, riscv64, s390x, x64 and x86 are supported!") @@ -127,32 +127,46 @@ endif() # Specify include paths if(TIZEN) - if(TARGET_ARCH_NAME STREQUAL "arm") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7hl-tizen-linux-gnueabihf) - endif() - if(TARGET_ARCH_NAME STREQUAL "armel") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi) - endif() - if(TARGET_ARCH_NAME STREQUAL "arm64") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/aarch64-tizen-linux-gnu) - endif() - if(TARGET_ARCH_NAME STREQUAL "x86") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/i586-tizen-linux-gnu) - endif() - if(TARGET_ARCH_NAME STREQUAL "x64") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/x86_64-tizen-linux-gnu) - endif() - if(TARGET_ARCH_NAME STREQUAL "riscv64") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/riscv64-tizen-linux-gnu) + function(find_toolchain_dir prefix) + # Dynamically find the version subdirectory + file(GLOB DIRECTORIES "${prefix}/*") + list(GET DIRECTORIES 0 FIRST_MATCH) + get_filename_component(TOOLCHAIN_VERSION ${FIRST_MATCH} NAME) + + set(TIZEN_TOOLCHAIN_PATH "${prefix}/${TOOLCHAIN_VERSION}" 
PARENT_SCOPE) + endfunction() + + if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$") + find_toolchain_dir("${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + else() + find_toolchain_dir("${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") endif() + + message(STATUS "TIZEN_TOOLCHAIN_PATH set to: ${TIZEN_TOOLCHAIN_PATH}") + + include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++) + include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++/${TIZEN_TOOLCHAIN}) endif() +function(locate_toolchain_exec exec var) + set(TOOLSET_PREFIX ${TOOLCHAIN}-) + string(TOUPPER ${exec} EXEC_UPPERCASE) + if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") + set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) + return() + endif() + + find_program(EXEC_LOCATION_${exec} + NAMES + "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" + "${TOOLSET_PREFIX}${exec}") + + if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") + message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") + endif() + set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) +endfunction() + if(ANDROID) if(TARGET_ARCH_NAME STREQUAL "arm") set(ANDROID_ABI armeabi-v7a) @@ -183,66 +197,24 @@ elseif(FREEBSD) set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld") elseif(ILLUMOS) set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") + set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") + set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") include_directories(SYSTEM ${CROSS_ROOTFS}/include) - set(TOOLSET_PREFIX ${TOOLCHAIN}-) - function(locate_toolchain_exec exec var) - string(TOUPPER ${exec} EXEC_UPPERCASE) - if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") - set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) - return() - endif() - - find_program(EXEC_LOCATION_${exec} - NAMES - "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" - "${TOOLSET_PREFIX}${exec}") - - if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") - message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") - endif() - set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) - endfunction() - - set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") - locate_toolchain_exec(gcc CMAKE_C_COMPILER) locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) - - set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") - set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") elseif(HAIKU) set(CMAKE_SYSROOT "${CROSS_ROOTFS}") set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin") - - set(TOOLSET_PREFIX ${TOOLCHAIN}-) - function(locate_toolchain_exec exec var) - string(TOUPPER ${exec} EXEC_UPPERCASE) - if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") - set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) - return() - endif() - - find_program(EXEC_LOCATION_${exec} - NAMES - "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" - "${TOOLSET_PREFIX}${exec}") - - if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") - message(FATAL_ERROR "Unable to find toolchain executable. 
Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") - endif() - set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) - endfunction() - set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") + set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") + set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") locate_toolchain_exec(gcc CMAKE_C_COMPILER) locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) - set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") - set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") - # let CMake set up the correct search paths include(Platform/Haiku) else() @@ -272,21 +244,21 @@ endif() if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") if(TIZEN) - add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") endif() elseif(TARGET_ARCH_NAME MATCHES "^(arm64|x64|riscv64)$") if(TIZEN) - add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib64") add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64") - add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-Wl,--rpath-link=${TIZEN_TOOLCHAIN_PATH}") endif() elseif(TARGET_ARCH_NAME STREQUAL "s390x") add_toolchain_linker_flag("--target=${TOOLCHAIN}") @@ -297,10 +269,10 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86") endif() add_toolchain_linker_flag(-m32) if(TIZEN) - add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") endif() elseif(ILLUMOS) add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64") diff --git a/global.json b/global.json index fce754fc3..2bd3ce1d3 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24606.6", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24606.6" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24610.1", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24610.1" } } From 542165e67ad5d94450ee44cdbf1517287a7d9567 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Wed, 11 Dec 2024 13:23:03 +0000 Subject: [PATCH 10/15] Update dependencies from https://github.com/dotnet/arcade build 20241210.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24610.2 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index d76af212a..fc8826304 100644 --- a/eng/Version.Details.xml +++ 
b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - bdea4c2fa4851874b372af1c90d4bd2ec51ad499 + d5c8bb8726b46b5867961f5d8f56f1b13e72dcb9 - + https://github.com/dotnet/arcade - bdea4c2fa4851874b372af1c90d4bd2ec51ad499 + d5c8bb8726b46b5867961f5d8f56f1b13e72dcb9 diff --git a/global.json b/global.json index 2bd3ce1d3..1cde950dd 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24610.1", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24610.1" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24610.2", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24610.2" } } From 5fd51039ff471702490865ace43b5b06e7bbf146 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Thu, 12 Dec 2024 13:22:24 +0000 Subject: [PATCH 11/15] Update dependencies from https://github.com/dotnet/arcade build 20241212.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24612.1 --- eng/Version.Details.xml | 8 ++++---- eng/common/cross/build-rootfs.sh | 14 +++----------- eng/common/cross/toolchain.cmake | 11 +++++++++-- global.json | 4 ++-- 4 files changed, 18 insertions(+), 19 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index fc8826304..b174fb537 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - d5c8bb8726b46b5867961f5d8f56f1b13e72dcb9 + 2c4eeabbeab30dfe532190f7e5c448078231cdd0 - + https://github.com/dotnet/arcade - d5c8bb8726b46b5867961f5d8f56f1b13e72dcb9 + 2c4eeabbeab30dfe532190f7e5c448078231cdd0 diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index 096bfe51f..de9807297 100644 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -52,14 +52,12 @@ __UbuntuPackages+=" symlinks" __UbuntuPackages+=" libicu-dev" __UbuntuPackages+=" liblttng-ust-dev" __UbuntuPackages+=" libunwind8-dev" -__UbuntuPackages+=" libnuma-dev" __AlpinePackages+=" gettext-dev" __AlpinePackages+=" icu-dev" __AlpinePackages+=" libunwind-dev" __AlpinePackages+=" lttng-ust-dev" __AlpinePackages+=" compiler-rt" -__AlpinePackages+=" numactl-dev" # runtime libraries' dependencies __UbuntuPackages+=" libcurl4-openssl-dev" @@ -424,13 +422,12 @@ case "$__AlpineVersion" in elif [[ "$__AlpineArch" == "riscv64" ]]; then __AlpineLlvmLibsLookup=1 __AlpineVersion=edge # minimum version with APKINDEX.tar.gz (packages archive) + elif [[ -n "$__AlpineVersion" ]]; then + # use whichever alpine version is provided and select the latest toolchain libs + __AlpineLlvmLibsLookup=1 else __AlpineVersion=3.13 # 3.13 to maximize compatibility __AlpinePackages+=" llvm10-libs" - - if [[ "$__AlpineArch" == "armv7" ]]; then - __AlpinePackages="${__AlpinePackages//numactl-dev/}" - fi fi esac @@ -444,11 +441,6 @@ if [[ "$__BuildArch" == "armel" ]]; then __LLDB_Package="lldb-3.5-dev" fi -if [[ "$__CodeName" == "xenial" && "$__UbuntuArch" == "armhf" ]]; then - # libnuma-dev is not available on armhf for xenial - __UbuntuPackages="${__UbuntuPackages//libnuma-dev/}" -fi - __UbuntuPackages+=" ${__LLDB_Package:-}" if [[ -z "$__UbuntuRepo" ]]; then diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake index deac538e6..0ff85cf03 100644 --- a/eng/common/cross/toolchain.cmake +++ b/eng/common/cross/toolchain.cmake @@ -67,6 +67,13 @@ elseif(TARGET_ARCH_NAME STREQUAL "armv6") else() set(TOOLCHAIN 
"arm-linux-gnueabihf") endif() +elseif(TARGET_ARCH_NAME STREQUAL "loongarch64") + set(CMAKE_SYSTEM_PROCESSOR "loongarch64") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/loongarch64-alpine-linux-musl) + set(TOOLCHAIN "loongarch64-alpine-linux-musl") + else() + set(TOOLCHAIN "loongarch64-linux-gnu") + endif() elseif(TARGET_ARCH_NAME STREQUAL "ppc64le") set(CMAKE_SYSTEM_PROCESSOR ppc64le) if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/powerpc64le-alpine-linux-musl) @@ -118,7 +125,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86") set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu") endif() else() - message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, ppc64le, riscv64, s390x, x64 and x86 are supported!") + message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64 and x86 are supported!") endif() if(DEFINED ENV{TOOLCHAIN}) @@ -284,7 +291,7 @@ endif() # Specify compile options -if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) +if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|loongarch64|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN}) diff --git a/global.json b/global.json index 1cde950dd..2dc36be91 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24610.2", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24610.2" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24612.1", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24612.1" } } From c24b7a0f08ccdfc3e63eed4adbb023aed11f3cd0 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Fri, 13 Dec 2024 13:21:06 +0000 Subject: [PATCH 12/15] Update dependencies from https://github.com/dotnet/arcade build 20241212.4 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24612.4 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index b174fb537..7ac8f1d14 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - 2c4eeabbeab30dfe532190f7e5c448078231cdd0 + 1e161ed635ca19f61b1ddddee61b0bfc995fd716 - + https://github.com/dotnet/arcade - 2c4eeabbeab30dfe532190f7e5c448078231cdd0 + 1e161ed635ca19f61b1ddddee61b0bfc995fd716 diff --git a/global.json b/global.json index 2dc36be91..97b30eadf 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24612.1", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24612.1" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24612.4", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24612.4" } } From fbe6c1510b1e4fbd55c847c1bda681226fcf6020 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Sat, 14 Dec 2024 13:21:37 +0000 Subject: [PATCH 13/15] Update dependencies from https://github.com/dotnet/arcade build 20241213.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24613.2 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff 
--git a/eng/Version.Details.xml b/eng/Version.Details.xml index 7ac8f1d14..703732148 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - 1e161ed635ca19f61b1ddddee61b0bfc995fd716 + 255d5e0c89958af276883a988108c2d616438805 - + https://github.com/dotnet/arcade - 1e161ed635ca19f61b1ddddee61b0bfc995fd716 + 255d5e0c89958af276883a988108c2d616438805 diff --git a/global.json b/global.json index 97b30eadf..bd14ab3c7 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24612.4", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24612.4" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24613.2", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24613.2" } } From c5db39572090a5bfabc92df299d16642cbaa069c Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Tue, 17 Dec 2024 13:23:29 +0000 Subject: [PATCH 14/15] Update dependencies from https://github.com/dotnet/arcade build 20241216.1 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24616.1 --- eng/Version.Details.xml | 8 ++++---- global.json | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 703732148..7add06146 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - 255d5e0c89958af276883a988108c2d616438805 + e0abaf3431b0fe3c9f9902aa42f6008b1250e75e - + https://github.com/dotnet/arcade - 255d5e0c89958af276883a988108c2d616438805 + e0abaf3431b0fe3c9f9902aa42f6008b1250e75e diff --git a/global.json b/global.json index bd14ab3c7..baf4aeba6 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24613.2", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24613.2" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24616.1", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24616.1" } } From e1ab846c356ca63b047ca93be6be817aeee6fbe2 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Wed, 18 Dec 2024 13:20:53 +0000 Subject: [PATCH 15/15] Update dependencies from https://github.com/dotnet/arcade build 20241217.2 Microsoft.DotNet.Arcade.Sdk , Microsoft.DotNet.Helix.Sdk From Version 9.0.0-beta.24170.7 -> To Version 10.0.0-beta.24617.2 --- eng/Version.Details.xml | 8 ++++---- eng/common/cross/build-rootfs.sh | 2 +- global.json | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 7add06146..e5c5a1044 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,13 +3,13 @@ - + https://github.com/dotnet/arcade - e0abaf3431b0fe3c9f9902aa42f6008b1250e75e + 4f2968fce08735a7e22fca6bd4c99d003221d716 - + https://github.com/dotnet/arcade - e0abaf3431b0fe3c9f9902aa42f6008b1250e75e + 4f2968fce08735a7e22fca6bd4c99d003221d716 diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index de9807297..464040aaa 100644 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -422,7 +422,7 @@ case "$__AlpineVersion" in elif [[ "$__AlpineArch" == "riscv64" ]]; then __AlpineLlvmLibsLookup=1 __AlpineVersion=edge # minimum version with APKINDEX.tar.gz (packages archive) - elif [[ -n "$__AlpineVersion" ]]; then + elif [[ -n "$__AlpineMajorVersion" ]]; then # use whichever alpine 
version is provided and select the latest toolchain libs __AlpineLlvmLibsLookup=1 else diff --git a/global.json b/global.json index baf4aeba6..6f284b742 100644 --- a/global.json +++ b/global.json @@ -6,7 +6,7 @@ "xcopy-msbuild": "16.8.0-preview2.1" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24616.1", - "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24616.1" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24617.2", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.24617.2" } }