diff --git a/.CodeQL.yml b/.CodeQL.yml
deleted file mode 100644
index 3c93eef4979827..00000000000000
--- a/.CodeQL.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-# This file configures CodeQL runs and TSA bug autofiling. For more information, see:
-# https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/codeql/troubleshooting/bugs/generated-library-code
-# (Access restricted to Microsoft employees only.)
-
-path_classifiers:
- refs:
- # The ref/ directories don't contain shipping implementations of code, so they should
- # be excluded from analysis. If there is a problem at the API layer, the analysis
- # engine will detect the problem in the src/ implementations anyway.
- - src/libraries/**/ref/*
diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json
index 22f4226d1bfee9..2663dda99de3fe 100644
--- a/.config/dotnet-tools.json
+++ b/.config/dotnet-tools.json
@@ -15,7 +15,7 @@
]
},
"microsoft.dotnet.xharness.cli": {
- "version": "9.0.0-prerelease.24161.1",
+ "version": "9.0.0-prerelease.24112.4",
"commands": [
"xharness"
]
diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index d76e325e8b6cae..5a697ac088194c 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -25,5 +25,4 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
libssl-dev \
libkrb5-dev \
zlib1g-dev \
- ninja-build \
- tzdata
+ ninja-build
diff --git a/.devcontainer/scripts/onCreateCommand.sh b/.devcontainer/scripts/onCreateCommand.sh
index 779ced4de99b98..066d0eccda876b 100755
--- a/.devcontainer/scripts/onCreateCommand.sh
+++ b/.devcontainer/scripts/onCreateCommand.sh
@@ -2,26 +2,6 @@
set -e
-function wasm_common() {
- # prebuild for WASM, so it is ready for wasm development
- make -C src/mono/browser provision-wasm
- export EMSDK_PATH=$PWD/src/mono/browser/emsdk
- case "$1" in
- wasm)
- # Put your common commands for wasm here
- ./build.sh mono+libs -os browser -c Release
- ;;
- wasm-multithreaded)
- # Put your common commands for wasm-multithread here
- ./build.sh mono+libs -os browser -c Release /p:WasmEnableThreads=true
- ;;
- *)
- # install dotnet-serve for running wasm samples
- ./dotnet.sh tool install dotnet-serve --version 1.10.172 --tool-path ./.dotnet-tools-global
- ;;
- esac
-}
-
opt=$1
case "$opt" in
@@ -40,11 +20,13 @@ case "$opt" in
;;
wasm)
- wasm_common $opt
- ;;
+ # prebuild for WASM, so it is ready for wasm development
+ make -C src/mono/browser provision-wasm
+ export EMSDK_PATH=$PWD/src/mono/browser/emsdk
+ ./build.sh mono+libs -os browser -c Release
- wasm-multithreaded)
- wasm_common $opt
+ # install dotnet-serve for running wasm samples
+ ./dotnet.sh tool install dotnet-serve --version 1.10.172 --tool-path ./.dotnet-tools-global
;;
esac
diff --git a/.devcontainer/wasm-multiThreaded/Dockerfile b/.devcontainer/wasm-multiThreaded/Dockerfile
deleted file mode 100644
index 75f2465b391b3c..00000000000000
--- a/.devcontainer/wasm-multiThreaded/Dockerfile
+++ /dev/null
@@ -1,60 +0,0 @@
-# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.192.0/containers/dotnet/.devcontainer/base.Dockerfile
-# For details on dotnet specific container, see: https://github.com/microsoft/vscode-dev-containers/tree/main/containers/dotnet
-
-# [Choice] .NET version: 6.0, 7.0
-ARG VARIANT="6.0-jammy"
-FROM mcr.microsoft.com/devcontainers/dotnet:0-${VARIANT}
-
-# Set up machine requirements to build the repo and the gh CLI
-RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
- && apt-get -y install --no-install-recommends \
- cmake \
- llvm \
- clang \
- build-essential \
- python3 \
- curl \
- git \
- lldb \
- liblldb-dev \
- libunwind8 \
- libunwind8-dev \
- gettext \
- libicu-dev \
- liblttng-ust-dev \
- libssl-dev \
- libkrb5-dev \
- zlib1g-dev \
- ninja-build
-
-SHELL ["/bin/bash", "-c"]
-
-# Install LTS npm and node
-RUN source /usr/local/share/nvm/nvm.sh && nvm install --lts
-
-# Install V8 Engine
-RUN curl -sSL "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/linux/chromium-v8/v8-linux64-rel-10.8.168.zip" -o ./v8.zip \
- && unzip ./v8.zip -d /usr/local/v8 \
- && echo $'#!/usr/bin/env bash\n\
-"/usr/local/v8/d8" --snapshot_blob="/usr/local/v8/snapshot_blob.bin" "$@"\n' > /usr/local/bin/v8 \
- && chmod +x /usr/local/bin/v8
-
-# install chromium dependencies to run debugger tests:
-RUN sudo apt-get install libnss3 -y \
- && apt-get install libatk1.0-0 -y \
- && apt-get install libatk-bridge2.0-0 -y \
- && apt-get install libcups2 -y \
- && apt-get install libdrm2 -y \
- && apt-get install libxkbcommon-x11-0 -y \
- && apt-get install libxcomposite-dev -y \
- && apt-get install libxdamage1 -y \
- && apt-get install libxrandr2 -y \
- && apt-get install libgbm-dev -y \
- && apt-get install libpango-1.0-0 -y \
- && apt-get install libcairo2 -y \
- && apt-get install libasound2 -y
-
-# install firefox dependencies to run debugger tests:
-RUN sudo apt-get install libdbus-glib-1-2 -y \
- && apt-get install libgtk-3-0 -y \
- && apt-get install libx11-xcb-dev -y
diff --git a/.devcontainer/wasm-multiThreaded/devcontainer.json b/.devcontainer/wasm-multiThreaded/devcontainer.json
deleted file mode 100644
index db3b2981b57149..00000000000000
--- a/.devcontainer/wasm-multiThreaded/devcontainer.json
+++ /dev/null
@@ -1,65 +0,0 @@
-// For format details, see https://aka.ms/devcontainer.json.
-{
- "name": "WASM multithreaded development (prebuilt)",
- "build": {
- "dockerfile": "Dockerfile",
- "args": {
- // Update 'VARIANT' to pick a .NET Core version: 6.0, 7.0
- "VARIANT": "6.0-jammy"
- }
- },
- "hostRequirements": {
- "cpus": 4,
- "memory": "8gb"
- },
-
- "features": {
- "ghcr.io/devcontainers/features/github-cli:1": {}
- },
-
- // Configure tool-specific properties.
- "customizations": {
- // Configure properties specific to VS Code.
- "vscode": {
- // Add the IDs of extensions you want installed when the container is created.
- "extensions": [
- "ms-dotnettools.csharp"
- ],
- "settings": {
- // Loading projects on demand is better for larger codebases
- "omnisharp.enableMsBuildLoadProjectsOnDemand": true,
- "omnisharp.enableRoslynAnalyzers": true,
- "omnisharp.enableEditorConfigSupport": true,
- "omnisharp.enableAsyncCompletion": true,
- "omnisharp.testRunSettings": "${containerWorkspaceFolder}/artifacts/obj/vscode/.runsettings"
- }
- }
- },
-
- // Use 'onCreateCommand' to run pre-build commands inside the codespace
- "onCreateCommand": "${containerWorkspaceFolder}/.devcontainer/scripts/onCreateCommand.sh wasm-multithreaded",
-
- // Use 'postCreateCommand' to run commands after the container is created.
- "postCreateCommand": "${containerWorkspaceFolder}/.devcontainer/scripts/postCreateCommand.sh",
-
- // Add the locally installed dotnet to the path to ensure that it is activated
- // This allows developers to just use 'dotnet build' on the command-line, and the local dotnet version will be used.
- // Add the global tools dir to the PATH so that globally installed tools will work
- "remoteEnv": {
- "PATH": "${containerWorkspaceFolder}/.dotnet:${containerWorkspaceFolder}/.dotnet-tools-global:${containerEnv:PATH}",
- "DOTNET_MULTILEVEL_LOOKUP": "0",
- // Path to provisioned Emscripten SDK, for rebuilding the wasm runtime
- "EMSDK_PATH": "${containerWorkspaceFolder}/src/mono/browser/emsdk",
- },
-
- // Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
- "remoteUser": "vscode",
-
- // Forward mono samples port
- "forwardPorts": [8000],
- "portsAttributes": {
- "8000": {
- "label": "mono wasm samples (8000)",
- }
- }
-}
diff --git a/.devcontainer/wasm/devcontainer.json b/.devcontainer/wasm/devcontainer.json
index f4144299ff11d3..ab598dcb9a32d2 100644
--- a/.devcontainer/wasm/devcontainer.json
+++ b/.devcontainer/wasm/devcontainer.json
@@ -1,6 +1,6 @@
// For format details, see https://aka.ms/devcontainer.json.
{
- "name": "WASM singlethreaded development (prebuilt)",
+ "name": "WASM development (prebuilt)",
"build": {
"dockerfile": "Dockerfile",
"args": {
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 12303cdf57558d..b042cebfc6b92a 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -21,7 +21,7 @@
/src/mono @marek-safar
-/src/mono/llvm @vargaz @steveisok
+/src/mono/llvm @vargaz @SamMonoRT
/src/mono/mono/arch @vargaz
/src/mono/mono/eglib @vargaz @lambdageek
@@ -36,7 +36,7 @@
/src/mono/mono/eventpipe @lateralusX @lambdageek
-/src/mono/mono/mini @vargaz @lambdageek @steveisok
+/src/mono/mono/mini @vargaz @lambdageek @SamMonoRT
/src/mono/mono/mini/*cfgdump* @vargaz
/src/mono/mono/mini/*exceptions* @vargaz @BrzVlad
/src/mono/mono/mini/*llvm* @vargaz @fanyang-mono
@@ -50,7 +50,7 @@
/src/mono/mono/mini/*simd* @fanyang-mono
/src/mono/mono/profiler @BrzVlad @lambdageek
-/src/mono/mono/sgen @BrzVlad @lambdageek
+/src/mono/mono/sgen @BrzVlad @lambdageek @SamMonoRT
/src/mono/mono/utils @vargaz @lambdageek
/src/mono/mono/utils/*-win* @lateralusX @lambdageek
@@ -112,4 +112,4 @@
# Area ownership and repo automation
/docs/area-owners.* @jeffhandley
/docs/issue*.md @jeffhandley
-/.github/policies/ @jeffhandley @mkArtakMSFT
+/.github/fabricbot.json @jeffhandley
diff --git a/.github/ISSUE_TEMPLATE/04_ci_known_issue.yml b/.github/ISSUE_TEMPLATE/04_ci_known_issue.yml
new file mode 100644
index 00000000000000..17ec4e5e5ec930
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/04_ci_known_issue.yml
@@ -0,0 +1,32 @@
+name: CI Known Issue Report
+description: Create a known issue directly
+labels: ["blocking-clean-ci","Known Build Error"]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ Use this template to report issues currently affecting PR stability, be it build or test failures.
+ - type: textarea
+ id: background
+ attributes:
+ label: Error Blob
+ description: Please identify a clear error string that can help identify future instances of this issue. For more information on how to fill this check our issue triage guidelines at [Failure Analysis](/dotnet/runtime/blob/main/docs/workflow/ci/failure-analysis.md#what-to-do-if-you-determine-the-failure-is-unrelated)
+ value: |
+ ```json
+ {
+ "ErrorMessage": "",
+ "BuildRetry": false,
+ "ErrorPattern": "",
+ "ExcludeConsoleLog": true
+ }
+ ```
+ validations:
+ required: true
+ - type: textarea
+ id: repro-steps
+ attributes:
+ label: Reproduction Steps
+ description: |
+ If possible describe where you observe the issue with links and any other relevant details.
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/04_blank_issue.md b/.github/ISSUE_TEMPLATE/05_blank_issue.md
similarity index 100%
rename from .github/ISSUE_TEMPLATE/04_blank_issue.md
rename to .github/ISSUE_TEMPLATE/05_blank_issue.md
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index b14edd954edeef..54d8c5740bad6c 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -18,6 +18,3 @@ contact_links:
- name: Issue with WPF
url: https://github.com/dotnet/wpf/issues/new/choose
about: Please open issues relating to WPF in dotnet/wpf.
- - name: CI Known Issue Report
- url: https://helix.dot.net/BuildAnalysis/CreateKnownIssues
- about: Use the helper to create a Known Issue in CI if failures in your runs are unrelated to your change. See [Failure Analysis](https://github.com/dotnet/runtime/blob/main/docs/workflow/ci/failure-analysis.md#what-to-do-if-you-determine-the-failure-is-unrelated) for triage instructions.
diff --git a/.github/fabricbot.json b/.github/fabricbot.json
new file mode 100644
index 00000000000000..27ea96a2762f3a
--- /dev/null
+++ b/.github/fabricbot.json
@@ -0,0 +1,2938 @@
+[
+ {
+ "taskType": "scheduledAndTrigger",
+ "capabilityId": "IssueRouting",
+ "subCapability": "@Mention",
+ "version": "1.0",
+ "config": {
+ "taskName": "Area-owners",
+ "labelsAndMentions": [
+ {
+ "labels": [
+ "area-AssemblyLoader-coreclr"
+ ],
+ "mentionees": [
+ "vitek-karas",
+ "agocke",
+ "vsadov"
+ ]
+ },
+ {
+ "labels": [
+ "area-AssemblyLoader-mono"
+ ],
+ "mentionees": []
+ },
+ {
+ "labels": [
+ "area-CodeGen-coreclr"
+ ],
+ "mentionees": [
+ "JulieLeeMSFT",
+ "jakobbotsch"
+ ]
+ },
+ {
+ "labels": [
+ "area-Codegen-Interpreter-mono"
+ ],
+ "mentionees": [
+ "brzvlad",
+ "kotlarmilos"
+ ]
+ },
+ {
+ "labels": [
+ "area-Codegen-JIT-Mono"
+ ],
+ "mentionees": [
+ "SamMonoRT",
+ "vargaz"
+ ]
+ },
+ {
+ "labels": [
+ "area-CodeGen-LLVM-Mono"
+ ],
+ "mentionees": [
+ "SamMonoRT",
+ "vargaz"
+ ]
+ },
+ {
+ "labels": [
+ "area-Codegen-Intrinsics-mono"
+ ],
+ "mentionees": [
+ "SamMonoRT",
+ "fanyang-mono"
+ ]
+ },
+ {
+ "labels": [
+ "area-CodeGen-meta-Mono"
+ ],
+ "mentionees": [
+ "SamMonoRT",
+ "vargaz",
+ "lambdageek"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.DateTime"
+ ],
+ "mentionees": [
+ "dotnet/area-system-datetime"
+ ]
+ },
+ {
+ "labels": [
+ "area-Debugger-mono"
+ ],
+ "mentionees": [
+ "thaystg"
+ ]
+ },
+ {
+ "labels": [
+ "area-DependencyModel"
+ ],
+ "mentionees": [
+ "dotnet/area-dependencymodel"
+ ]
+ },
+ {
+ "labels": [
+ "area-Diagnostics-coreclr"
+ ],
+ "mentionees": [
+ "tommcdon"
+ ]
+ },
+ {
+ "labels": [
+ "area-Extensions-Caching"
+ ],
+ "mentionees": [
+ "dotnet/area-extensions-caching"
+ ]
+ },
+ {
+ "labels": [
+ "area-Extensions-Configuration"
+ ],
+ "mentionees": [
+ "dotnet/area-extensions-configuration"
+ ]
+ },
+ {
+ "labels": [
+ "area-Extensions-DependencyInjection"
+ ],
+ "mentionees": [
+ "dotnet/area-extensions-dependencyinjection"
+ ]
+ },
+ {
+ "labels": [
+ "area-Extensions-FileSystem"
+ ],
+ "mentionees": [
+ "dotnet/area-extensions-filesystem"
+ ]
+ },
+ {
+ "labels": [
+ "area-Extensions-Hosting"
+ ],
+ "mentionees": [
+ "dotnet/area-extensions-hosting"
+ ]
+ },
+ {
+ "labels": [
+ "area-Extensions-HttpClientFactory"
+ ],
+ "mentionees": [
+ "dotnet/ncl"
+ ]
+ },
+ {
+ "labels": [
+ "area-Extensions-Logging"
+ ],
+ "mentionees": [
+ "dotnet/area-extensions-logging"
+ ]
+ },
+ {
+ "labels": [
+ "area-Extensions-Options"
+ ],
+ "mentionees": [
+ "dotnet/area-extensions-options"
+ ]
+ },
+ {
+ "labels": [
+ "area-Extensions-Primitives"
+ ],
+ "mentionees": [
+ "dotnet/area-extensions-primitives"
+ ]
+ },
+ {
+ "labels": [
+ "area-GC-coreclr"
+ ],
+ "mentionees": [
+ "dotnet/gc"
+ ]
+ },
+ {
+ "labels": [
+ "area-GC-mono"
+ ],
+ "mentionees": [
+ "brzvlad"
+ ]
+ },
+ {
+ "labels": [
+ "area-Host"
+ ],
+ "mentionees": [
+ "vitek-karas",
+ "agocke",
+ "vsadov"
+ ]
+ },
+ {
+ "labels": [
+ "area-HostModel"
+ ],
+ "mentionees": [
+ "vitek-karas",
+ "agocke"
+ ]
+ },
+ {
+ "labels": [
+ "area-ILTools-coreclr"
+ ],
+ "mentionees": [
+ "JulieLeeMSFT"
+ ]
+ },
+ {
+ "labels": [
+ "area-Tools-ILVerification"
+ ],
+ "mentionees": [
+ "JulieLeeMSFT"
+ ]
+ },
+ {
+ "labels": [
+ "area-Infrastructure"
+ ],
+ "mentionees": [
+ "dotnet/runtime-infrastructure"
+ ]
+ },
+ {
+ "labels": [
+ "area-Infrastructure-coreclr"
+ ],
+ "mentionees": [
+ "hoyosjs"
+ ]
+ },
+ {
+ "labels": [
+ "area-Infrastructure-libraries"
+ ],
+ "mentionees": [
+ "dotnet/area-infrastructure-libraries"
+ ]
+ },
+ {
+ "labels": [
+ "area-Infrastructure-mono"
+ ],
+ "mentionees": [
+ "directhex"
+ ]
+ },
+ {
+ "labels": [
+ "area-Meta"
+ ],
+ "mentionees": [
+ "dotnet/area-meta"
+ ]
+ },
+ {
+ "labels": [
+ "area-Microsoft.CSharp"
+ ],
+ "mentionees": [
+ "cston"
+ ]
+ },
+ {
+ "labels": [
+ "area-Microsoft.Extensions"
+ ],
+ "mentionees": [
+ "dotnet/area-microsoft-extensions"
+ ]
+ },
+ {
+ "labels": [
+ "area-Microsoft.VisualBasic"
+ ],
+ "mentionees": [
+ "cston"
+ ]
+ },
+ {
+ "labels": [
+ "area-Microsoft.Win32"
+ ],
+ "mentionees": [
+ "dotnet/area-microsoft-win32"
+ ]
+ },
+ {
+ "labels": [
+ "area-NativeAOT-coreclr"
+ ],
+ "mentionees": [
+ "agocke",
+ "MichalStrehovsky",
+ "jkotas"
+ ]
+ },
+ {
+ "labels": [
+ "area-Single-File"
+ ],
+ "mentionees": [
+ "agocke",
+ "vitek-karas",
+ "vsadov"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Buffers"
+ ],
+ "mentionees": [
+ "dotnet/area-system-buffers"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.CodeDom"
+ ],
+ "mentionees": [
+ "dotnet/area-system-codedom"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Collections"
+ ],
+ "mentionees": [
+ "dotnet/area-system-collections"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.ComponentModel"
+ ],
+ "mentionees": [
+ "dotnet/area-system-componentmodel"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.ComponentModel.Composition"
+ ],
+ "mentionees": [
+ "dotnet/area-system-componentmodel-composition"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.ComponentModel.DataAnnotations"
+ ],
+ "mentionees": [
+ "dotnet/area-system-componentmodel-dataannotations"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Composition"
+ ],
+ "mentionees": [
+ "dotnet/area-system-composition"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Configuration"
+ ],
+ "mentionees": [
+ "dotnet/area-system-configuration"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Console"
+ ],
+ "mentionees": [
+ "dotnet/area-system-console"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Data"
+ ],
+ "mentionees": [
+ "roji",
+ "ajcvickers"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Data.Odbc"
+ ],
+ "mentionees": [
+ "roji",
+ "ajcvickers"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Data.OleDB"
+ ],
+ "mentionees": [
+ "roji",
+ "ajcvickers"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Data.SqlClient"
+ ],
+ "mentionees": [
+ "davoudeshtehari",
+ "david-engel",
+ "jrahnama"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Diagnostics"
+ ],
+ "mentionees": [
+ "tommcdon"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Diagnostics.Activity"
+ ],
+ "mentionees": [
+ "dotnet/area-system-diagnostics-activity"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Diagnostics.EventLog"
+ ],
+ "mentionees": [
+ "dotnet/area-system-diagnostics-eventlog"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Diagnostics.PerformanceCounter"
+ ],
+ "mentionees": [
+ "dotnet/area-system-diagnostics-performancecounter"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Diagnostics.Process"
+ ],
+ "mentionees": [
+ "dotnet/area-system-diagnostics-process"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Diagnostics.TraceSource"
+ ],
+ "mentionees": [
+ "dotnet/area-system-diagnostics-tracesource"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Diagnostics.Tracing"
+ ],
+ "mentionees": [
+ "tarekgh",
+ "tommcdon",
+ "pjanotti"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.DirectoryServices"
+ ],
+ "mentionees": [
+ "dotnet/area-system-directoryservices",
+ "jay98014"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Drawing"
+ ],
+ "mentionees": [
+ "dotnet/area-system-drawing"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Dynamic.Runtime"
+ ],
+ "mentionees": [
+ "cston"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Formats.Asn1"
+ ],
+ "mentionees": [
+ "dotnet/area-system-formats-asn1",
+ "bartonjs",
+ "vcsjones"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Formats.Cbor"
+ ],
+ "mentionees": [
+ "dotnet/area-system-formats-cbor",
+ "bartonjs",
+ "vcsjones"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Formats.Tar"
+ ],
+ "mentionees": [
+ "dotnet/area-system-formats-tar"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Globalization"
+ ],
+ "mentionees": [
+ "dotnet/area-system-globalization"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.IO"
+ ],
+ "mentionees": [
+ "dotnet/area-system-io"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.IO.Compression"
+ ],
+ "mentionees": [
+ "dotnet/area-system-io-compression"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.IO.Hashing"
+ ],
+ "mentionees": [
+ "dotnet/area-system-io-hashing",
+ "bartonjs",
+ "vcsjones"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.IO.Ports"
+ ],
+ "mentionees": [
+ "dotnet/area-system-io-ports"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Linq"
+ ],
+ "mentionees": [
+ "dotnet/area-system-linq"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Linq.Expressions"
+ ],
+ "mentionees": [
+ "cston"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Linq.Parallel"
+ ],
+ "mentionees": [
+ "dotnet/area-system-linq-parallel"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Management"
+ ],
+ "mentionees": [
+ "dotnet/area-system-management"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Memory"
+ ],
+ "mentionees": [
+ "dotnet/area-system-memory"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Net"
+ ],
+ "mentionees": [
+ "dotnet/ncl"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Net.Http"
+ ],
+ "mentionees": [
+ "dotnet/ncl"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Net.Quic"
+ ],
+ "mentionees": [
+ "dotnet/ncl"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Net.Security"
+ ],
+ "mentionees": [
+ "dotnet/ncl",
+ "bartonjs",
+ "vcsjones"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Net.Sockets"
+ ],
+ "mentionees": [
+ "dotnet/ncl"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Numerics"
+ ],
+ "mentionees": [
+ "dotnet/area-system-numerics"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Numerics.Tensors"
+ ],
+ "mentionees": [
+ "dotnet/area-system-numerics-tensors"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Reflection"
+ ],
+ "mentionees": [
+ "dotnet/area-system-reflection"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Reflection.Emit"
+ ],
+ "mentionees": [
+ "dotnet/area-system-reflection-emit"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Reflection.Metadata"
+ ],
+ "mentionees": [
+ "dotnet/area-system-reflection-metadata"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Resources"
+ ],
+ "mentionees": [
+ "dotnet/area-system-resources"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Runtime"
+ ],
+ "mentionees": [
+ "dotnet/area-system-runtime"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Runtime.CompilerServices"
+ ],
+ "mentionees": [
+ "dotnet/area-system-runtime-compilerservices"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Runtime.InteropServices"
+ ],
+ "mentionees": [
+ "dotnet/interop-contrib"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Runtime.Intrinsics"
+ ],
+ "mentionees": [
+ "dotnet/area-system-runtime-intrinsics"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Security"
+ ],
+ "mentionees": [
+ "dotnet/area-system-security",
+ "bartonjs",
+ "vcsjones"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.ServiceProcess"
+ ],
+ "mentionees": [
+ "dotnet/area-system-serviceprocess"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Speech"
+ ],
+ "mentionees": [
+ "dotnet/area-system-speech"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Text.Encoding"
+ ],
+ "mentionees": [
+ "dotnet/area-system-text-encoding"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Text.Encodings.Web"
+ ],
+ "mentionees": [
+ "dotnet/area-system-text-encodings-web"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Text.Json"
+ ],
+ "mentionees": [
+ "dotnet/area-system-text-json",
+ "gregsdennis"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Text.RegularExpressions"
+ ],
+ "mentionees": [
+ "dotnet/area-system-text-regularexpressions"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Threading"
+ ],
+ "mentionees": [
+ "mangod9"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Threading.Channels"
+ ],
+ "mentionees": [
+ "dotnet/area-system-threading-channels"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Threading.Tasks"
+ ],
+ "mentionees": [
+ "dotnet/area-system-threading-tasks"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Transactions"
+ ],
+ "mentionees": [
+ "roji",
+ "ajcvickers"
+ ]
+ },
+ {
+ "labels": [
+ "area-System.Xml"
+ ],
+ "mentionees": [
+ "dotnet/area-system-xml"
+ ]
+ },
+ {
+ "labels": [
+ "area-Tools-ILLink"
+ ],
+ "mentionees": [
+ "agocke",
+ "sbomer",
+ "vitek-karas"
+ ]
+ },
+ {
+ "labels": [
+ "area-vm-coreclr"
+ ],
+ "mentionees": [
+ "mangod9"
+ ]
+ }
+ ],
+ "replyTemplate": "Tagging subscribers to this area: ${mentionees}\nSee info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.",
+ "enableForPullRequests": true
+ },
+ "disabled": false
+ },
+ {
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "IssuesOnlyResponder",
+ "version": "1.0",
+ "config": {
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "labelAdded",
+ "parameters": {
+ "label": "breaking-change"
+ }
+ }
+ ]
+ },
+ "eventType": "issue",
+ "eventNames": [
+ "issues"
+ ],
+ "actions": [
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "needs-breaking-change-doc-created"
+ }
+ },
+ {
+ "name": "addReply",
+ "parameters": {
+ "comment": "Added `needs-breaking-change-doc-created` label because this issue has the `breaking-change` label. \n\n1. [ ] Create and link to this issue a matching issue in the dotnet/docs repo using the [breaking change documentation template](https://aka.ms/dotnet/docs/new-breaking-change-issue), then remove this `needs-breaking-change-doc-created` label.\n\nTagging @dotnet/compat for awareness of the breaking change."
+ }
+ }
+ ],
+ "taskName": "Add breaking change doc label to issue"
+ },
+ "disabled": false
+ },
+ {
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestResponder",
+ "version": "1.0",
+ "config": {
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "labelAdded",
+ "parameters": {
+ "label": "breaking-change"
+ }
+ },
+ {
+ "name": "isPr",
+ "parameters": {}
+ }
+ ]
+ },
+ "eventType": "pull_request",
+ "eventNames": [
+ "pull_request",
+ "issues"
+ ],
+ "actions": [
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "needs-breaking-change-doc-created"
+ }
+ },
+ {
+ "name": "addReply",
+ "parameters": {
+ "comment": "Added `needs-breaking-change-doc-created` label because this PR has the `breaking-change` label. \n\nWhen you commit this breaking change:\n\n1. [ ] Create and link to this PR and the issue a matching issue in the dotnet/docs repo using the [breaking change documentation template](https://aka.ms/dotnet/docs/new-breaking-change-issue), then remove this `needs-breaking-change-doc-created` label.\n2. [ ] Ask a committer to mail the `.NET Breaking Change Notification` DL.\n\nTagging @dotnet/compat for awareness of the breaking change."
+ }
+ }
+ ],
+ "taskName": "Add breaking change doc label to PR"
+ },
+ "disabled": false
+ },
+ {
+ "taskType": "scheduledAndTrigger",
+ "capabilityId": "IssueRouting",
+ "subCapability": "@Mention",
+ "version": "1.0",
+ "config": {
+ "taskName": "@Mention for linkable-framework",
+ "labelsAndMentions": [
+ {
+ "labels": [
+ "linkable-framework"
+ ],
+ "mentionees": [
+ "eerhardt",
+ "vitek-karas",
+ "LakshanF",
+ "sbomer",
+ "joperezr",
+ "marek-safar"
+ ]
+ }
+ ],
+ "replyTemplate": "Tagging subscribers to 'linkable-framework': ${mentionees}\nSee info in area-owners.md if you want to be subscribed.",
+ "enableForPullRequests": true
+ }
+ },
+ {
+ "taskType": "scheduledAndTrigger",
+ "capabilityId": "IssueRouting",
+ "subCapability": "@Mention",
+ "version": "1.0",
+ "config": {
+ "taskName": "@Mention for size-reduction",
+ "replyTemplate": "Tagging subscribers to 'size-reduction': ${mentionees}\nSee info in area-owners.md if you want to be subscribed.",
+ "labelsAndMentions": [
+ {
+ "labels": [
+ "size-reduction"
+ ],
+ "mentionees": [
+ "eerhardt",
+ "SamMonoRT",
+ "marek-safar"
+ ]
+ }
+ ],
+ "enableForPullRequests": true
+ }
+ },
+ {
+ "taskType": "scheduledAndTrigger",
+ "capabilityId": "IssueRouting",
+ "subCapability": "@Mention",
+ "version": "1.0",
+ "config": {
+ "taskName": "@Mention for wasm",
+ "labelsAndMentions": [
+ {
+ "labels": [
+ "arch-wasm"
+ ],
+ "mentionees": [
+ "lewing"
+ ]
+ }
+ ],
+ "replyTemplate": "Tagging subscribers to 'arch-wasm': ${mentionees}\nSee info in area-owners.md if you want to be subscribed.",
+ "enableForPullRequests": true
+ }
+ },
+ {
+ "taskType": "scheduledAndTrigger",
+ "capabilityId": "IssueRouting",
+ "subCapability": "@Mention",
+ "version": "1.0",
+ "config": {
+ "taskName": "@Mention for ios",
+ "labelsAndMentions": [
+ {
+ "labels": [
+ "os-ios"
+ ],
+ "mentionees": [
+ "steveisok",
+ "akoeplinger",
+ "kotlarmilos"
+ ]
+ }
+ ],
+ "enableForPullRequests": true,
+ "replyTemplate": "Tagging subscribers to 'os-ios': ${mentionees}\nSee info in area-owners.md if you want to be subscribed."
+ }
+ },
+ {
+ "taskType": "scheduledAndTrigger",
+ "capabilityId": "IssueRouting",
+ "subCapability": "@Mention",
+ "version": "1.0",
+ "config": {
+ "taskName": "@Mention for android",
+ "labelsAndMentions": [
+ {
+ "labels": [
+ "os-android"
+ ],
+ "mentionees": [
+ "steveisok",
+ "akoeplinger"
+ ]
+ }
+ ],
+ "enableForPullRequests": true,
+      "replyTemplate": "Tagging subscribers to 'os-android': ${mentionees}\nSee info in area-owners.md if you want to be subscribed."
+ }
+ },
+ {
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestResponder",
+ "version": "1.0",
+ "config": {
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "operator": "or",
+ "operands": [
+ {
+ "name": "prMatchesPattern",
+ "parameters": {
+ "matchRegex": ".*ILLink.*"
+ }
+ },
+ {
+ "name": "prMatchesPattern",
+ "parameters": {
+ "matchRegex": ".*illink.*"
+ }
+ }
+ ]
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "linkable-framework"
+ }
+ }
+ ]
+ },
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ }
+ ]
+ },
+ "eventType": "pull_request",
+ "eventNames": [
+ "pull_request",
+ "issues"
+ ],
+      "taskName": "[Linkable-framework workgroup] Add linkable-framework label to new Prs that touch files with *ILLink* that do not have it already",
+ "actions": [
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "linkable-framework"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestResponder",
+ "version": "1.0",
+ "config": {
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "operator": "or",
+ "operands": [
+ {
+ "name": "prMatchesPattern",
+ "parameters": {
+ "matchRegex": ".*ILLink.*"
+ }
+ },
+ {
+ "name": "prMatchesPattern",
+ "parameters": {
+ "matchRegex": ".*illink.*"
+ }
+ }
+ ]
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "linkable-framework"
+ }
+ }
+ ]
+ },
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ },
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "synchronize"
+ }
+ }
+ ]
+ },
+ "eventType": "pull_request",
+ "eventNames": [
+ "pull_request",
+ "issues"
+ ],
+ "taskName": "[Linkable-framework workgroup] Add linkable-framework label to Prs that get changes pushed where they touch *ILLInk* files",
+ "actions": [
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "linkable-framework"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "IssuesOnlyResponder",
+ "version": "1.0",
+ "config": {
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "labelAdded",
+ "parameters": {
+ "label": "backlog-cleanup-candidate"
+ }
+ }
+ ]
+ },
+ "eventType": "issue",
+ "eventNames": [
+ "issues"
+ ],
+ "taskName": "Manual Issue Cleanup",
+ "actions": [
+ {
+ "name": "addReply",
+ "parameters": {
+ "comment": "Due to lack of recent activity, this issue has been marked as a candidate for backlog cleanup. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will undo this process.\n\nThis process is part of our [issue cleanup automation](https://github.com/dotnet/runtime/blob/main/docs/issue-cleanup.md)."
+ }
+ },
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "taskType": "scheduled",
+ "capabilityId": "ScheduledSearch",
+ "subCapability": "ScheduledSearch",
+ "version": "1.1",
+ "config": {
+ "frequency": [
+ {
+ "weekDay": 0,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 1,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 2,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 3,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 4,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 5,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 6,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ }
+ ],
+ "searchTerms": [
+ {
+ "name": "noActivitySince",
+ "parameters": {
+ "days": 1644
+ }
+ },
+ {
+ "name": "isIssue",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ },
+ {
+ "name": "noLabel",
+ "parameters": {
+ "label": "backlog-cleanup-candidate"
+ }
+ }
+ ],
+ "taskName": "Automated Issue cleanup",
+ "actions": [
+ {
+ "name": "addReply",
+ "parameters": {
+ "comment": "Due to lack of recent activity, this issue has been marked as a candidate for backlog cleanup. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will undo this process.\n\nThis process is part of our [issue cleanup automation](https://github.com/dotnet/runtime/blob/main/docs/issue-cleanup.md)."
+ }
+ },
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "backlog-cleanup-candidate"
+ }
+ },
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "taskType": "scheduledAndTrigger",
+ "capabilityId": "IssueRouting",
+ "subCapability": "@Mention",
+ "version": "1.0",
+ "config": {
+ "taskName": "@Mention for tvos",
+ "labelsAndMentions": [
+ {
+ "labels": [
+ "os-tvos"
+ ],
+ "mentionees": [
+ "steveisok",
+ "akoeplinger"
+ ]
+ }
+ ],
+ "enableForPullRequests": true,
+ "replyTemplate": "Tagging subscribers to 'os-tvos': ${mentionees}\nSee info in area-owners.md if you want to be subscribed."
+ }
+ },
+ {
+ "taskType": "scheduledAndTrigger",
+ "capabilityId": "IssueRouting",
+ "subCapability": "@Mention",
+ "version": "1.0",
+ "config": {
+ "labelsAndMentions": [
+ {
+ "labels": [
+ "os-maccatalyst"
+ ],
+ "mentionees": [
+ "steveisok",
+ "akoeplinger"
+ ]
+ }
+ ],
+ "replyTemplate": "Tagging subscribers to 'os-maccatalyst': ${mentionees}\nSee info in area-owners.md if you want to be subscribed.",
+ "enableForPullRequests": true,
+ "taskName": "@Mention for maccatalyst"
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "IssuesOnlyResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Add untriaged label to new/reopened issues without a milestone",
+ "actions": [
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "untriaged"
+ }
+ }
+ ],
+ "eventType": "issue",
+ "eventNames": [
+ "issues"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "operator": "or",
+ "operands": [
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "opened"
+ }
+ },
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "reopened"
+ }
+ }
+ ]
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "isInMilestone",
+ "parameters": {}
+ }
+ ]
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "untriaged"
+ }
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "IssuesOnlyResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Remove untriaged label from issues when closed or added to a milestone",
+ "actions": [
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "untriaged"
+ }
+ }
+ ],
+ "eventType": "issue",
+ "eventNames": [
+ "issues"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "operator": "or",
+ "operands": [
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "closed"
+ }
+ },
+ {
+ "name": "isInMilestone",
+ "parameters": {}
+ }
+ ]
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "untriaged"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "InPrLabel",
+ "subCapability": "InPrLabel",
+ "version": "1.0",
+ "config": {
+ "taskName": "Add `in-pr` label on issue when an open pull request is targeting it",
+ "inPrLabelText": "There is an active PR which will close this issue when it is merged",
+ "fixedLabelEnabled": false,
+ "label_inPr": "in-pr"
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Assign Team PRs to author",
+ "actions": [
+ {
+ "name": "assignToUser",
+ "parameters": {
+ "user": {
+ "type": "prAuthor"
+ }
+ }
+ }
+ ],
+ "eventType": "pull_request",
+ "eventNames": [
+ "pull_request"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "opened"
+ }
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "activitySenderHasPermissions",
+ "parameters": {
+ "permissions": "read"
+ }
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Label community PRs",
+ "actions": [
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "community-contribution"
+ }
+ }
+ ],
+ "eventType": "pull_request",
+ "eventNames": [
+ "pull_request"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "opened"
+ }
+ },
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "operator": "and",
+ "operands": [
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "activitySenderHasPermissions",
+ "parameters": {
+ "permissions": "admin"
+ }
+ }
+ ]
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "activitySenderHasPermissions",
+ "parameters": {
+ "permissions": "write"
+ }
+ }
+ ]
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "isActivitySender",
+ "parameters": {
+ "user": "github-actions[bot]"
+ }
+ }
+ ]
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "isActivitySender",
+ "parameters": {
+ "user": "dotnet-maestro[bot]"
+ }
+ }
+ ]
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "isActivitySender",
+ "parameters": {
+ "user": "dotnet-maestro-bot[bot]"
+ }
+ }
+ ]
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "isActivitySender",
+ "parameters": {
+ "user": "dotnet-maestro-bot"
+ }
+ }
+ ]
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "isActivitySender",
+ "parameters": {
+ "user": "dotnet-maestro"
+ }
+ }
+ ]
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "isActivitySender",
+ "parameters": {
+ "user": "github-actions"
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "IssuesOnlyResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Needs-author-action notification",
+ "actions": [
+ {
+ "name": "addReply",
+ "parameters": {
+ "comment": "This issue has been marked `needs-author-action` and may be missing some important information."
+ }
+ }
+ ],
+ "eventType": "issue",
+ "eventNames": [
+ "issues"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "labelAdded",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestReviewResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "PR reviews with \"changes requested\" applies the needs-author-action label",
+ "actions": [
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ }
+ ],
+ "eventType": "pull_request",
+ "eventNames": [
+ "pull_request_review"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "activitySenderHasPermissions",
+ "parameters": {
+ "state": "changes_requested",
+ "permissions": "read"
+ }
+ }
+ ]
+ },
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "submitted"
+ }
+ },
+ {
+ "name": "isReviewState",
+ "parameters": {
+ "state": "changes_requested"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "IssueCommentResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Replace `needs-author-action` label with `needs-further-triage` label when the author comments on an issue that is not still untriaged",
+ "actions": [
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "needs-further-triage"
+ }
+ },
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ }
+ ],
+ "eventType": "issue",
+ "eventNames": [
+ "issue_comment"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "created"
+ }
+ },
+ {
+ "name": "isActivitySender",
+ "parameters": {
+ "user": {
+ "type": "author"
+ }
+ }
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "untriaged"
+ }
+ }
+ ]
+ },
+ {
+ "name": "isIssue",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "IssueCommentResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Remove `needs-author-action` label when the author comments on an `untriaged` issue",
+ "actions": [
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ }
+ ],
+ "eventType": "issue",
+ "eventNames": [
+ "issue_comment"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "created"
+ }
+ },
+ {
+ "name": "isActivitySender",
+ "parameters": {
+ "user": {
+ "type": "author"
+ }
+ }
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "untriaged"
+ }
+ },
+ {
+ "name": "isIssue",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Pushing changes to PR branch removes the needs-author-action label",
+ "actions": [
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ }
+ ],
+ "eventType": "pull_request",
+ "eventNames": [
+ "pull_request"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "synchronize"
+ }
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestCommentResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Author commenting in PR removes the needs-author-action label",
+ "actions": [
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ }
+ ],
+ "eventType": "pull_request",
+ "eventNames": [
+ "issue_comment"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "isActivitySender",
+ "parameters": {
+ "user": {
+ "type": "author"
+ }
+ }
+ },
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "created"
+ }
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ },
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestReviewResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Author responding to a pull request review comment removes the needs-author-action label",
+ "actions": [
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ }
+ ],
+ "eventType": "pull_request",
+ "eventNames": [
+ "pull_request_review"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "isActivitySender",
+ "parameters": {
+ "user": {
+ "type": "author"
+ }
+ }
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ },
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "submitted"
+ }
+ },
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "scheduled",
+ "capabilityId": "ScheduledSearch",
+ "subCapability": "ScheduledSearch",
+ "version": "1.1",
+ "config": {
+ "taskName": "Add no-recent-activity label to issues",
+ "actions": [
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "addReply",
+ "parameters": {
+ "comment": "This issue has been automatically marked `no-recent-activity` because it has not had any activity for 14 days. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will remove `no-recent-activity`."
+ }
+ }
+ ],
+ "frequency": [
+ {
+ "weekDay": 0,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 1,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 2,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 3,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 4,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 5,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 6,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ }
+ ],
+ "searchTerms": [
+ {
+ "name": "isIssue",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ },
+ {
+ "name": "noActivitySince",
+ "parameters": {
+ "days": 14
+ }
+ },
+ {
+ "name": "noLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "scheduled",
+ "capabilityId": "ScheduledSearch",
+ "subCapability": "ScheduledSearch",
+ "version": "1.1",
+ "config": {
+ "taskName": "Add no-recent-activity label to PRs",
+ "actions": [
+ {
+ "name": "addLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "addReply",
+ "parameters": {
+ "comment": "This pull request has been automatically marked `no-recent-activity` because it has not had any activity for 14 days. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will remove `no-recent-activity`."
+ }
+ }
+ ],
+ "frequency": [
+ {
+ "weekDay": 0,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 1,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 2,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 3,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 4,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 5,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ },
+ {
+ "weekDay": 6,
+ "hours": [
+ 4,
+ 10,
+ 16,
+ 22
+ ],
+ "timezoneOffset": 1
+ }
+ ],
+ "searchTerms": [
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "needs-author-action"
+ }
+ },
+ {
+ "name": "noActivitySince",
+ "parameters": {
+ "days": 14
+ }
+ },
+ {
+ "name": "noLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "IssuesOnlyResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Remove `no-recent-activity` label from issues when issue is modified",
+ "actions": [
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "backlog-cleanup-candidate"
+ }
+ }
+ ],
+ "eventType": "issue",
+ "eventNames": [
+ "issues"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "isAction",
+ "parameters": {
+ "action": "closed"
+ }
+ }
+ ]
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "labelAdded",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "IssueCommentResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Remove `no-recent-activity` label when an issue is commented on",
+ "actions": [
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "backlog-cleanup-candidate"
+ }
+ }
+ ],
+ "eventType": "issue",
+ "eventNames": [
+ "issue_comment"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "isIssue",
+ "parameters": {}
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Remove `no-recent-activity` label from PRs when modified",
+ "actions": [
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "backlog-cleanup-candidate"
+ }
+ }
+ ],
+ "eventType": "pull_request",
+ "eventNames": [
+ "pull_request"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "operator": "not",
+ "operands": [
+ {
+ "name": "labelAdded",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestCommentResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Remove `no-recent-activity` label from PRs when commented on",
+ "actions": [
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "backlog-cleanup-candidate"
+ }
+ }
+ ],
+ "eventType": "pull_request",
+ "eventNames": [
+ "issue_comment"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "trigger",
+ "capabilityId": "IssueResponder",
+ "subCapability": "PullRequestReviewResponder",
+ "version": "1.0",
+ "config": {
+ "taskName": "Remove `no-recent-activity` label from PRs when new review is added",
+ "actions": [
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "removeLabel",
+ "parameters": {
+ "label": "backlog-cleanup-candidate"
+ }
+ }
+ ],
+ "eventType": "pull_request",
+ "eventNames": [
+ "pull_request_review"
+ ],
+ "conditions": {
+ "operator": "and",
+ "operands": [
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ }
+ ]
+ }
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "scheduled",
+ "capabilityId": "ScheduledSearch",
+ "subCapability": "ScheduledSearch",
+ "version": "1.1",
+ "config": {
+ "taskName": "Close issues with no recent activity",
+ "actions": [
+ {
+ "name": "addReply",
+ "parameters": {
+ "comment": "This issue will now be closed since it had been marked `no-recent-activity` but received no further activity in the past 14 days. It is still possible to reopen or comment on the issue, but please note that the issue will be locked if it remains inactive for another 30 days."
+ }
+ },
+ {
+ "name": "closeIssue",
+ "parameters": {}
+ }
+ ],
+ "frequency": [
+ {
+ "weekDay": 0,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 1,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 2,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 3,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 4,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 5,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 6,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ }
+ ],
+ "searchTerms": [
+ {
+ "name": "isIssue",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "noActivitySince",
+ "parameters": {
+ "days": 14
+ }
+ }
+ ]
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "scheduled",
+ "capabilityId": "ScheduledSearch",
+ "subCapability": "ScheduledSearch",
+ "version": "1.1",
+ "config": {
+ "taskName": "Close PRs with no-recent-activity",
+ "actions": [
+ {
+ "name": "addReply",
+ "parameters": {
+ "comment": "This pull request will now be closed since it had been marked `no-recent-activity` but received no further activity in the past 14 days. It is still possible to reopen or comment on the pull request, but please note that it will be locked if it remains inactive for another 30 days."
+ }
+ },
+ {
+ "name": "closeIssue",
+ "parameters": {}
+ }
+ ],
+ "frequency": [
+ {
+ "weekDay": 0,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 1,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 2,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 3,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 4,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 5,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 6,
+ "hours": [
+ 0,
+ 6,
+ 12,
+ 18
+ ],
+ "timezoneOffset": 0
+ }
+ ],
+ "searchTerms": [
+ {
+ "name": "isPr",
+ "parameters": {}
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ },
+ {
+ "name": "hasLabel",
+ "parameters": {
+ "label": "no-recent-activity"
+ }
+ },
+ {
+ "name": "noActivitySince",
+ "parameters": {
+ "days": 14
+ }
+ }
+ ]
+ }
+ },
+ {
+ "taskSource": "fabricbot-config",
+ "taskType": "scheduled",
+ "capabilityId": "ScheduledSearch",
+ "subCapability": "ScheduledSearch",
+ "version": "1.1",
+ "config": {
+ "taskName": "Close inactive Draft PRs",
+ "actions": [
+ {
+ "name": "closeIssue",
+ "parameters": {}
+ },
+ {
+ "name": "addReply",
+ "parameters": {
+ "comment": "Draft Pull Request was automatically closed for 30 days of inactivity. Please [let us know](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you'd like to reopen it."
+ }
+ }
+ ],
+ "frequency": [
+ {
+ "weekDay": 0,
+ "hours": [
+ 5,
+ 11,
+ 17,
+ 23
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 1,
+ "hours": [
+ 5,
+ 11,
+ 17,
+ 23
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 2,
+ "hours": [
+ 5,
+ 11,
+ 17,
+ 23
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 3,
+ "hours": [
+ 5,
+ 11,
+ 17,
+ 23
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 4,
+ "hours": [
+ 5,
+ 11,
+ 17,
+ 23
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 5,
+ "hours": [
+ 5,
+ 11,
+ 17,
+ 23
+ ],
+ "timezoneOffset": 0
+ },
+ {
+ "weekDay": 6,
+ "hours": [
+ 5,
+ 11,
+ 17,
+ 23
+ ],
+ "timezoneOffset": 0
+ }
+ ],
+ "searchTerms": [
+ {
+ "name": "isDraftPr",
+ "parameters": {
+ "value": "true"
+ }
+ },
+ {
+ "name": "isOpen",
+ "parameters": {}
+ },
+ {
+ "name": "noActivitySince",
+ "parameters": {
+ "days": 30
+ }
+ }
+ ]
+ }
+ }
+]
diff --git a/.github/policies/resourceManagement.yml b/.github/policies/resourceManagement.yml
deleted file mode 100644
index 94a9b162a4569a..00000000000000
--- a/.github/policies/resourceManagement.yml
+++ /dev/null
@@ -1,1882 +0,0 @@
-id:
-name: GitOps.PullRequestIssueManagement
-description: GitOps.PullRequestIssueManagement primitive
-owner:
-resource: repository
-disabled: false
-where:
-configuration:
- resourceManagementConfiguration:
- scheduledSearches:
- - description: Automated Issue cleanup
- frequencies:
- - hourly:
- hour: 6
- filters:
- - noActivitySince:
- days: 1644
- - isIssue
- - isOpen
- - isNotLabeledWith:
- label: backlog-cleanup-candidate
- actions:
- - addReply:
- reply: >-
- Due to lack of recent activity, this issue has been marked as a candidate for backlog cleanup. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will undo this process.
-
-
- This process is part of our [issue cleanup automation](https://github.com/dotnet/runtime/blob/main/docs/issue-cleanup.md).
- - addLabel:
- label: backlog-cleanup-candidate
- - addLabel:
- label: no-recent-activity
- - description: Add no-recent-activity label to issues
- frequencies:
- - hourly:
- hour: 6
- filters:
- - isIssue
- - isOpen
- - hasLabel:
- label: needs-author-action
- - noActivitySince:
- days: 14
- - isNotLabeledWith:
- label: no-recent-activity
- actions:
- - addLabel:
- label: no-recent-activity
- - addReply:
- reply: This issue has been automatically marked `no-recent-activity` because it has not had any activity for 14 days. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will remove `no-recent-activity`.
- - description: Add no-recent-activity label to PRs
- frequencies:
- - hourly:
- hour: 6
- filters:
- - isPullRequest
- - isOpen
- - hasLabel:
- label: needs-author-action
- - noActivitySince:
- days: 14
- - isNotLabeledWith:
- label: no-recent-activity
- actions:
- - addLabel:
- label: no-recent-activity
- - addReply:
- reply: This pull request has been automatically marked `no-recent-activity` because it has not had any activity for 14 days. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will remove `no-recent-activity`.
- - description: Close issues with no recent activity
- frequencies:
- - hourly:
- hour: 6
- filters:
- - isIssue
- - isOpen
- - hasLabel:
- label: no-recent-activity
- - noActivitySince:
- days: 14
- actions:
- - addReply:
- reply: This issue will now be closed since it had been marked `no-recent-activity` but received no further activity in the past 14 days. It is still possible to reopen or comment on the issue, but please note that the issue will be locked if it remains inactive for another 30 days.
- - closeIssue
- - description: Close PRs with no-recent-activity
- frequencies:
- - hourly:
- hour: 6
- filters:
- - isPullRequest
- - isOpen
- - hasLabel:
- label: no-recent-activity
- - noActivitySince:
- days: 14
- actions:
- - addReply:
- reply: This pull request will now be closed since it had been marked `no-recent-activity` but received no further activity in the past 14 days. It is still possible to reopen or comment on the pull request, but please note that it will be locked if it remains inactive for another 30 days.
- - closeIssue
- - description: Close inactive Draft PRs
- frequencies:
- - hourly:
- hour: 6
- filters:
- - isDraftPullRequest
- - isOpen
- - noActivitySince:
- days: 30
- actions:
- - closeIssue
- - addReply:
- reply: Draft Pull Request was automatically closed for 30 days of inactivity. Please [let us know](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you'd like to reopen it.
- eventResponderTasks:
- - if:
- - or:
- - payloadType: Issues
- - payloadType: Pull_Request
- - isAction:
- action: Opened
- then:
- - if:
- - hasLabel:
- label: area-AssemblyLoader-coreclr
- then:
- - mentionUsers:
- mentionees:
- - vitek-karas
- - agocke
- - vsadov
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-AssemblyLoader-mono
- then:
- - mentionUsers:
- mentionees: []
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-CodeGen-coreclr
- then:
- - mentionUsers:
- mentionees:
- - JulieLeeMSFT
- - jakobbotsch
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Codegen-Interpreter-mono
- then:
- - mentionUsers:
- mentionees:
- - brzvlad
- - kotlarmilos
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Codegen-JIT-Mono
- then:
- - mentionUsers:
- mentionees:
- - SamMonoRT
- - vargaz
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-CodeGen-LLVM-Mono
- then:
- - mentionUsers:
- mentionees:
- - SamMonoRT
- - vargaz
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Codegen-Intrinsics-mono
- then:
- - mentionUsers:
- mentionees:
- - SamMonoRT
- - fanyang-mono
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-CodeGen-meta-Mono
- then:
- - mentionUsers:
- mentionees:
- - SamMonoRT
- - vargaz
- - lambdageek
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.DateTime
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-datetime
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Debugger-mono
- then:
- - mentionUsers:
- mentionees:
- - thaystg
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-DependencyModel
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-dependencymodel
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Diagnostics-coreclr
- then:
- - mentionUsers:
- mentionees:
- - tommcdon
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Extensions-Caching
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-extensions-caching
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Extensions-Configuration
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-extensions-configuration
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Extensions-DependencyInjection
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-extensions-dependencyinjection
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Extensions-FileSystem
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-extensions-filesystem
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Extensions-Hosting
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-extensions-hosting
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Extensions-HttpClientFactory
- then:
- - mentionUsers:
- mentionees:
- - dotnet/ncl
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Extensions-Logging
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-extensions-logging
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Extensions-Options
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-extensions-options
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Extensions-Primitives
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-extensions-primitives
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-GC-coreclr
- then:
- - mentionUsers:
- mentionees:
- - dotnet/gc
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-GC-mono
- then:
- - mentionUsers:
- mentionees:
- - brzvlad
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Host
- then:
- - mentionUsers:
- mentionees:
- - vitek-karas
- - agocke
- - vsadov
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-HostModel
- then:
- - mentionUsers:
- mentionees:
- - vitek-karas
- - agocke
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-ILTools-coreclr
- then:
- - mentionUsers:
- mentionees:
- - JulieLeeMSFT
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Tools-ILVerification
- then:
- - mentionUsers:
- mentionees:
- - JulieLeeMSFT
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Infrastructure
- then:
- - mentionUsers:
- mentionees:
- - dotnet/runtime-infrastructure
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Infrastructure-coreclr
- then:
- - mentionUsers:
- mentionees:
- - hoyosjs
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Infrastructure-libraries
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-infrastructure-libraries
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Infrastructure-mono
- then:
- - mentionUsers:
- mentionees:
- - directhex
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Meta
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-meta
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Microsoft.CSharp
- then:
- - mentionUsers:
- mentionees:
- - cston
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Microsoft.Extensions
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-microsoft-extensions
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Microsoft.VisualBasic
- then:
- - mentionUsers:
- mentionees:
- - cston
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Microsoft.Win32
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-microsoft-win32
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-NativeAOT-coreclr
- then:
- - mentionUsers:
- mentionees:
- - agocke
- - MichalStrehovsky
- - jkotas
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Single-File
- then:
- - mentionUsers:
- mentionees:
- - agocke
- - vitek-karas
- - vsadov
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Buffers
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-buffers
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.CodeDom
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-codedom
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Collections
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-collections
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.ComponentModel
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-componentmodel
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.ComponentModel.Composition
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-componentmodel-composition
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.ComponentModel.DataAnnotations
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-componentmodel-dataannotations
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Composition
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-composition
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Configuration
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-configuration
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Console
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-console
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Data
- then:
- - mentionUsers:
- mentionees:
- - roji
- - ajcvickers
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Data.Odbc
- then:
- - mentionUsers:
- mentionees:
- - roji
- - ajcvickers
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Data.OleDB
- then:
- - mentionUsers:
- mentionees:
- - roji
- - ajcvickers
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Data.SqlClient
- then:
- - mentionUsers:
- mentionees:
- - davoudeshtehari
- - david-engel
- - jrahnama
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Diagnostics
- then:
- - mentionUsers:
- mentionees:
- - tommcdon
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Diagnostics.Activity
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-diagnostics-activity
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Diagnostics.EventLog
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-diagnostics-eventlog
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Diagnostics.PerformanceCounter
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-diagnostics-performancecounter
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Diagnostics.Process
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-diagnostics-process
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Diagnostics.TraceSource
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-diagnostics-tracesource
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Diagnostics.Tracing
- then:
- - mentionUsers:
- mentionees:
- - tarekgh
- - tommcdon
- - pjanotti
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.DirectoryServices
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-directoryservices
- - jay98014
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Drawing
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-drawing
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Dynamic.Runtime
- then:
- - mentionUsers:
- mentionees:
- - cston
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Formats.Asn1
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-formats-asn1
- - bartonjs
- - vcsjones
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Formats.Cbor
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-formats-cbor
- - bartonjs
- - vcsjones
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Formats.Tar
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-formats-tar
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Globalization
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-globalization
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.IO
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-io
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.IO.Compression
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-io-compression
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.IO.Hashing
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-io-hashing
- - bartonjs
- - vcsjones
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.IO.Ports
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-io-ports
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Linq
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-linq
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Linq.Expressions
- then:
- - mentionUsers:
- mentionees:
- - cston
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Linq.Parallel
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-linq-parallel
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Management
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-management
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Memory
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-memory
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Net
- then:
- - mentionUsers:
- mentionees:
- - dotnet/ncl
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Net.Http
- then:
- - mentionUsers:
- mentionees:
- - dotnet/ncl
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Net.Quic
- then:
- - mentionUsers:
- mentionees:
- - dotnet/ncl
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Net.Security
- then:
- - mentionUsers:
- mentionees:
- - dotnet/ncl
- - bartonjs
- - vcsjones
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Net.Sockets
- then:
- - mentionUsers:
- mentionees:
- - dotnet/ncl
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Numerics
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-numerics
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Numerics.Tensors
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-numerics-tensors
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Reflection
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-reflection
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Reflection.Emit
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-reflection-emit
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Reflection.Metadata
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-reflection-metadata
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Resources
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-resources
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Runtime
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-runtime
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Runtime.CompilerServices
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-runtime-compilerservices
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Runtime.InteropServices
- then:
- - mentionUsers:
- mentionees:
- - dotnet/interop-contrib
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Runtime.Intrinsics
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-runtime-intrinsics
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Security
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-security
- - bartonjs
- - vcsjones
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.ServiceProcess
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-serviceprocess
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Speech
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-speech
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Text.Encoding
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-text-encoding
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Text.Encodings.Web
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-text-encodings-web
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Text.Json
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-text-json
- - gregsdennis
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Text.RegularExpressions
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-text-regularexpressions
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Threading
- then:
- - mentionUsers:
- mentionees:
- - mangod9
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Threading.Channels
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-threading-channels
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Threading.Tasks
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-threading-tasks
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Transactions
- then:
- - mentionUsers:
- mentionees:
- - roji
- - ajcvickers
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-System.Xml
- then:
- - mentionUsers:
- mentionees:
- - dotnet/area-system-xml
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-Tools-ILLink
- then:
- - mentionUsers:
- mentionees:
- - agocke
- - sbomer
- - vitek-karas
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- - if:
- - hasLabel:
- label: area-vm-coreclr
- then:
- - mentionUsers:
- mentionees:
- - mangod9
- replyTemplate: >-
- Tagging subscribers to this area: ${mentionees}
-
- See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed.
- assignMentionees: False
- description: Area-owners
- - if:
- - payloadType: Issues
- - labelAdded:
- label: breaking-change
- then:
- - addLabel:
- label: needs-breaking-change-doc-created
- - addReply:
- reply: >-
- Added `needs-breaking-change-doc-created` label because this issue has the `breaking-change` label.
-
-
- 1. [ ] Create and link to this issue a matching issue in the dotnet/docs repo using the [breaking change documentation template](https://aka.ms/dotnet/docs/new-breaking-change-issue), then remove this `needs-breaking-change-doc-created` label.
-
-
- Tagging @dotnet/compat for awareness of the breaking change.
- description: Add breaking change doc label to issue
- - if:
- - payloadType: Pull_Request
- - labelAdded:
- label: breaking-change
- - isPullRequest
- then:
- - addLabel:
- label: needs-breaking-change-doc-created
- - addReply:
- reply: >-
- Added `needs-breaking-change-doc-created` label because this PR has the `breaking-change` label.
-
-
- When you commit this breaking change:
-
-
- 1. [ ] Create and link to this PR and the issue a matching issue in the dotnet/docs repo using the [breaking change documentation template](https://aka.ms/dotnet/docs/new-breaking-change-issue), then remove this `needs-breaking-change-doc-created` label.
-
- 2. [ ] Ask a committer to mail the `.NET Breaking Change Notification` DL.
-
-
- Tagging @dotnet/compat for awareness of the breaking change.
- description: Add breaking change doc label to PR
- - if:
- - or:
- - payloadType: Issues
- - payloadType: Pull_Request
- - isAction:
- action: Opened
- then:
- - if:
- - hasLabel:
- label: linkable-framework
- then:
- - mentionUsers:
- mentionees:
- - eerhardt
- - vitek-karas
- - LakshanF
- - sbomer
- - joperezr
- - marek-safar
- replyTemplate: >-
- Tagging subscribers to 'linkable-framework': ${mentionees}
-
- See info in area-owners.md if you want to be subscribed.
- assignMentionees: False
- description: '@Mention for linkable-framework'
- - if:
- - or:
- - payloadType: Issues
- - payloadType: Pull_Request
- - isAction:
- action: Opened
- then:
- - if:
- - hasLabel:
- label: size-reduction
- then:
- - mentionUsers:
- mentionees:
- - eerhardt
- - SamMonoRT
- - marek-safar
- replyTemplate: >-
- Tagging subscribers to 'size-reduction': ${mentionees}
-
- See info in area-owners.md if you want to be subscribed.
- assignMentionees: False
- description: '@Mention for size-reduction'
- - if:
- - or:
- - payloadType: Issues
- - payloadType: Pull_Request
- - isAction:
- action: Opened
- then:
- - if:
- - hasLabel:
- label: arch-wasm
- then:
- - mentionUsers:
- mentionees:
- - lewing
- replyTemplate: >-
- Tagging subscribers to 'arch-wasm': ${mentionees}
-
- See info in area-owners.md if you want to be subscribed.
- assignMentionees: False
- description: '@Mention for wasm'
- - if:
- - or:
- - payloadType: Issues
- - payloadType: Pull_Request
- - isAction:
- action: Opened
- then:
- - if:
- - hasLabel:
- label: os-ios
- then:
- - mentionUsers:
- mentionees:
- - steveisok
- - akoeplinger
- - kotlarmilos
- replyTemplate: >-
- Tagging subscribers to 'os-ios': ${mentionees}
-
- See info in area-owners.md if you want to be subscribed.
- assignMentionees: False
- description: '@Mention for ios'
- - if:
- - or:
- - payloadType: Issues
- - payloadType: Pull_Request
- - isAction:
- action: Opened
- then:
- - if:
- - hasLabel:
- label: os-android
- then:
- - mentionUsers:
- mentionees:
- - steveisok
- - akoeplinger
- replyTemplate: >-
- Tagging subscribers to 'arch-android': ${mentionees}
-
- See info in area-owners.md if you want to be subscribed.
- assignMentionees: False
- description: '@Mention for android'
- - if:
- - payloadType: Pull_Request
- - or:
- - filesMatchPattern:
- pattern: .*ILLink.*
- - filesMatchPattern:
- pattern: .*illink.*
- - not:
- hasLabel:
- label: linkable-framework
- - isPullRequest
- - isOpen
- then:
- - addLabel:
- label: linkable-framework
- description: '[Linkable-framework workgroup] Add linkable-framework label to new Prs that touch files with *ILLink* that not have it already'
- - if:
- - payloadType: Pull_Request
- - or:
- - filesMatchPattern:
- pattern: .*ILLink.*
- - filesMatchPattern:
- pattern: .*illink.*
- - not:
- hasLabel:
- label: linkable-framework
- - isPullRequest
- - isOpen
- - isAction:
- action: Synchronize
- then:
- - addLabel:
- label: linkable-framework
- description: '[Linkable-framework workgroup] Add linkable-framework label to Prs that get changes pushed where they touch *ILLInk* files'
- - if:
- - payloadType: Issues
- - labelAdded:
- label: backlog-cleanup-candidate
- then:
- - addReply:
- reply: >-
- Due to lack of recent activity, this issue has been marked as a candidate for backlog cleanup. It will be closed if no further activity occurs within 14 more days. Any new comment (by anyone, not necessarily the author) will undo this process.
-
-
- This process is part of our [issue cleanup automation](https://github.com/dotnet/runtime/blob/main/docs/issue-cleanup.md).
- - addLabel:
- label: no-recent-activity
- description: Manual Issue Cleanup
- - if:
- - or:
- - payloadType: Issues
- - payloadType: Pull_Request
- - isAction:
- action: Opened
- then:
- - if:
- - hasLabel:
- label: os-tvos
- then:
- - mentionUsers:
- mentionees:
- - steveisok
- - akoeplinger
- replyTemplate: >-
- Tagging subscribers to 'os-tvos': ${mentionees}
-
- See info in area-owners.md if you want to be subscribed.
- assignMentionees: False
- description: '@Mention for tvos'
- - if:
- - or:
- - payloadType: Issues
- - payloadType: Pull_Request
- - isAction:
- action: Opened
- then:
- - if:
- - hasLabel:
- label: os-maccatalyst
- then:
- - mentionUsers:
- mentionees:
- - steveisok
- - akoeplinger
- replyTemplate: >-
- Tagging subscribers to 'os-maccatalyst': ${mentionees}
-
- See info in area-owners.md if you want to be subscribed.
- assignMentionees: False
- description: '@Mention for maccatalyst'
- - if:
- - payloadType: Issues
- - or:
- - isAction:
- action: Opened
- - isAction:
- action: Reopened
- - isOpen
- - not: isPartOfAnyMilestone
- - not:
- hasLabel:
- label: untriaged
- then:
- - addLabel:
- label: untriaged
- description: Add untriaged label to new/reopened issues without a milestone
- - if:
- - payloadType: Issues
- - or:
- - isAction:
- action: Closed
- - isPartOfAnyMilestone
- - hasLabel:
- label: untriaged
- then:
- - removeLabel:
- label: untriaged
- description: Remove untriaged label from issues when closed or added to a milestone
- - if:
- - payloadType: Pull_Request
- then:
- - inPrLabel:
- label: in-pr
- description: Add `in-pr` label on issue when an open pull request is targeting it
- - if:
- - payloadType: Pull_Request
- - isAction:
- action: Opened
- - not:
- activitySenderHasPermission:
- permission: Read
- then:
- - assignTo:
- author: True
- description: Assign Team PRs to author
- - if:
- - payloadType: Pull_Request
- - isAction:
- action: Opened
- - isPullRequest
- - and:
- - not:
- activitySenderHasPermission:
- permission: Admin
- - not:
- activitySenderHasPermission:
- permission: Write
- - not:
- isActivitySender:
- user: github-actions[bot]
- issueAuthor: False
- - not:
- isActivitySender:
- user: dotnet-maestro[bot]
- issueAuthor: False
- - not:
- isActivitySender:
- user: dotnet-maestro-bot[bot]
- issueAuthor: False
- - not:
- isActivitySender:
- user: dotnet-maestro-bot
- issueAuthor: False
- - not:
- isActivitySender:
- user: dotnet-maestro
- issueAuthor: False
- - not:
- isActivitySender:
- user: github-actions
- issueAuthor: False
- then:
- - addLabel:
- label: community-contribution
- description: Label community PRs
- - if:
- - payloadType: Issues
- - labelAdded:
- label: needs-author-action
- then:
- - addReply:
- reply: This issue has been marked `needs-author-action` and may be missing some important information.
- description: Needs-author-action notification
- - if:
- - payloadType: Pull_Request_Review
- - not:
- activitySenderHasPermission:
- permission: Read
- - isPullRequest
- - isAction:
- action: Submitted
- - isReviewState:
- reviewState: Changes_requested
- then:
- - addLabel:
- label: needs-author-action
- description: PR reviews with "changes requested" applies the needs-author-action label
- - if:
- - payloadType: Issue_Comment
- - isAction:
- action: Created
- - isActivitySender:
- issueAuthor: True
- - hasLabel:
- label: needs-author-action
- - not:
- hasLabel:
- label: untriaged
- - isIssue
- - isOpen
- then:
- - addLabel:
- label: needs-further-triage
- - removeLabel:
- label: needs-author-action
- description: Replace `needs-author-action` label with `needs-further-triage` label when the author comments on an issue that is not still untriaged
- - if:
- - payloadType: Issue_Comment
- - isAction:
- action: Created
- - isActivitySender:
- issueAuthor: True
- - hasLabel:
- label: needs-author-action
- - hasLabel:
- label: untriaged
- - isIssue
- - isOpen
- then:
- - removeLabel:
- label: needs-author-action
- description: Remove `needs-author-action` label when the author comments on an `untriaged` issue
- - if:
- - payloadType: Pull_Request
- - isPullRequest
- - isAction:
- action: Synchronize
- - hasLabel:
- label: needs-author-action
- then:
- - removeLabel:
- label: needs-author-action
- description: Pushing changes to PR branch removes the needs-author-action label
- - if:
- - payloadType: Issue_Comment
- - isActivitySender:
- issueAuthor: True
- - isAction:
- action: Created
- - hasLabel:
- label: needs-author-action
- - isPullRequest
- - isOpen
- then:
- - removeLabel:
- label: needs-author-action
- description: Author commenting in PR removes the needs-author-action label
- - if:
- - payloadType: Pull_Request_Review
- - isActivitySender:
- issueAuthor: True
- - hasLabel:
- label: needs-author-action
- - isAction:
- action: Submitted
- - isPullRequest
- - isOpen
- then:
- - removeLabel:
- label: needs-author-action
- description: Author responding to a pull request review comment removes the needs-author-action label
- - if:
- - payloadType: Issues
- - not:
- isAction:
- action: Closed
- - hasLabel:
- label: no-recent-activity
- - not:
- labelAdded:
- label: no-recent-activity
- then:
- - removeLabel:
- label: no-recent-activity
- - removeLabel:
- label: backlog-cleanup-candidate
- description: Remove `no-recent-activity` label from issues when issue is modified
- - if:
- - payloadType: Issue_Comment
- - hasLabel:
- label: no-recent-activity
- - isIssue
- then:
- - removeLabel:
- label: no-recent-activity
- - removeLabel:
- label: backlog-cleanup-candidate
- description: Remove `no-recent-activity` label when an issue is commented on
- - if:
- - payloadType: Pull_Request
- - isPullRequest
- - isOpen
- - hasLabel:
- label: no-recent-activity
- - not:
- labelAdded:
- label: no-recent-activity
- then:
- - removeLabel:
- label: no-recent-activity
- - removeLabel:
- label: backlog-cleanup-candidate
- description: Remove `no-recent-activity` label from PRs when modified
- - if:
- - payloadType: Issue_Comment
- - hasLabel:
- label: no-recent-activity
- - isPullRequest
- - isOpen
- then:
- - removeLabel:
- label: no-recent-activity
- - removeLabel:
- label: backlog-cleanup-candidate
- description: Remove `no-recent-activity` label from PRs when commented on
- - if:
- - payloadType: Pull_Request_Review
- - hasLabel:
- label: no-recent-activity
- - isPullRequest
- - isOpen
- then:
- - removeLabel:
- label: no-recent-activity
- - removeLabel:
- label: backlog-cleanup-candidate
- description: Remove `no-recent-activity` label from PRs when new review is added
-onFailure:
-onSuccess:
diff --git a/Directory.Build.props b/Directory.Build.props
index b1ac2559f9ed1e..26e112fab56e16 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -11,9 +11,6 @@
all cases, rather than ordinarily where we build them during mobile or wasm
build legs. This makes the manifests available on source-only builds. -->
true
-
- false
@@ -316,8 +313,6 @@
'$(OfficialBuildId)' == ''">true
true
-
- ClrFullNativeBuild;ClrRuntimeSubset;ClrJitSubset;ClrPalTestsSubset;ClrAllJitsSubset;ClrILToolsSubset;ClrNativeAotSubset;ClrSpmiSubset;ClrCrossComponentsSubset;ClrDebugSubset;HostArchitecture;PgoInstrument;NativeOptimizationDataSupported;CMakeArgs
diff --git a/THIRD-PARTY-NOTICES.TXT b/THIRD-PARTY-NOTICES.TXT
index 065abb6033e966..ff5aaacd21b76a 100644
--- a/THIRD-PARTY-NOTICES.TXT
+++ b/THIRD-PARTY-NOTICES.TXT
@@ -73,7 +73,7 @@ https://github.com/madler/zlib
https://zlib.net/zlib_license.html
/* zlib.h -- interface of the 'zlib' general purpose compression library
- version 1.3.1, January 22nd, 2024
+ version 1.2.13, October 13th, 2022
Copyright (C) 1995-2022 Jean-loup Gailly and Mark Adler
diff --git a/docs/area-owners.md b/docs/area-owners.md
index aaa81100379d7a..52cb16d8d8d720 100644
--- a/docs/area-owners.md
+++ b/docs/area-owners.md
@@ -73,9 +73,9 @@ Note: Editing this file doesn't update the mapping used by `@msftbot` for area-s
| area-System.Composition | @ericstj | @dotnet/area-system-composition | |
| area-System.Configuration | @ericstj | @dotnet/area-system-configuration | |
| area-System.Console | @jeffhandley | @dotnet/area-system-console | |
-| area-System.Data | @sammonort | @ajcvickers @davoudeshtehari @david-engel @roji |
- Odbc, OleDb - @saurabh500
|
-| area-System.Data.Odbc | @sammonort | @ajcvickers @roji | |
-| area-System.Data.OleDB | @sammonort | @ajcvickers @roji | |
+| area-System.Data | @ajcvickers | @ajcvickers @davoudeshtehari @david-engel @roji | - Odbc, OleDb - @saurabh500
|
+| area-System.Data.Odbc | @ajcvickers | @ajcvickers @roji | |
+| area-System.Data.OleDB | @ajcvickers | @ajcvickers @roji | |
| area-System.Data.SqlClient | @David-Engel | @davoudeshtehari @david-engel @jrahnama | Archived component - limited churn/contributions (see https://devblogs.microsoft.com/dotnet/introducing-the-new-microsoftdatasqlclient/) |
| area-System.DateTime | @ericstj | @dotnet/area-system-datetime | System namespace APIs related to dates and times, including DateOnly, DateTime, DateTimeKind, DateTimeOffset, DayOfWeek, TimeOnly, TimeSpan, TimeZone, and TimeZoneInfo |
| area-System.Diagnostics | @tommcdon | @dotnet/area-system-diagnostics | |
@@ -135,7 +135,7 @@ Note: Editing this file doesn't update the mapping used by `@msftbot` for area-s
| area-System.Threading.Channels | @ericstj | @dotnet/area-system-threading-channels | Consultants: @stephentoub |
| area-System.Threading.RateLimiting | @rafikiassumani-msft | @BrennanConroy @halter73 | |
| area-System.Threading.Tasks | @ericstj | @dotnet/area-system-threading-tasks | Consultants: @stephentoub |
-| area-System.Transactions | @sammonort | @roji | |
+| area-System.Transactions | @ajcvickers | @roji | |
| area-System.Xml | @jeffhandley | @dotnet/area-system-xml | |
| area-TieredCompilation-coreclr | @mangod9 | @kouvel | |
| area-Tools-ILLink | @agocke | @dotnet/illink | |
@@ -162,16 +162,16 @@ Note: Editing this file doesn't update the mapping used by `@msftbot` for area-s
> dedicated OS lead/owner, rather ownership falls back to the `area-*` label. However,
> Windows is a supported operating system of course.
-| Operating System | Lead | Owners (area experts to tag in PRs and issues) | Description |
-|------------------|---------------|----------------------------------------------------|-----------------|
-| os-android | @vitek-karas | @akoeplinger | |
-| os-freebsd | | @wfurt @Thefrank @sec | |
-| os-maccatalyst | @vitek-karas | @kotlarmilos | |
-| os-ios | @vitek-karas | @vargaz, @kotlarmilos | |
-| os-tizen | @gbalykov | @dotnet/samsung | |
-| os-tvos | @vitek-karas | @vargaz, @kotlarmilos | |
-| os-wasi | @lewing | @pavelsavara | |
-| os-browser | @lewing | @pavelsavara | |
+| Operating System | Lead | Owners (area experts to tag in PRs and issues) | Description |
+|------------------|---------------|---------------------------------------------------------------------|-----------------|
+| os-android | @vitek-karas | @akoeplinger | |
+| os-freebsd | | @wfurt @Thefrank @sec | |
+| os-maccatalyst | @vitek-karas | @kotlarmilos | |
+| os-ios | @vitek-karas | @vargaz, @kotlarmilos | |
+| os-tizen | @gbalykov | @hjleee, @wscho77, @clamp03, @JongHeonChoi, @t-mustafin, @viewizard | |
+| os-tvos | @vitek-karas | @vargaz, @kotlarmilos | |
+| os-wasi | @lewing | @pavelsavara | |
+| os-browser | @lewing | @pavelsavara | |
## Architectures
@@ -180,12 +180,12 @@ Note: Editing this file doesn't update the mapping used by `@msftbot` for area-s
> [!NOTE]
> Ownership isn't the same as supported. See [operating systems](#operating-systems) for details.
-| Architecture | Lead | Owners (area experts to tag in PRs and issues) | Description |
-|------------------|---------------|----------------------------------------------------|-----------------|
-| arch-loongarch64 | @shushanhf | @LuckyXu-HF | |
-| arch-riscv | @gbalykov | @dotnet/samsung | |
-| arch-s390x | @uweigand | @uweigand | |
-| arch-wasm | @lewing | @lewing, @pavelsavara | |
+| Architecture | Lead | Owners (area experts to tag in PRs and issues) | Description |
+|------------------|---------------|---------------------------------------------------------------------|--------------|
+| arch-loongarch64 | @shushanhf | @LuckyXu-HF | |
+| arch-riscv | @gbalykov | @hjleee, @wscho77, @clamp03, @JongHeonChoi, @t-mustafin, @viewizard | |
+| arch-s390x | @uweigand | @uweigand | |
+| arch-wasm | @lewing | @lewing, @pavelsavara | |
## Community Triagers
diff --git a/docs/design/coreclr/botr/guide-for-porting.md b/docs/design/coreclr/botr/guide-for-porting.md
index f7ca105bf165db..5d2c01aa52d066 100644
--- a/docs/design/coreclr/botr/guide-for-porting.md
+++ b/docs/design/coreclr/botr/guide-for-porting.md
@@ -413,6 +413,12 @@ Here is an annotated list of the stubs implemented for Unix on Arm64.
Today use of this feature on Unix requires hand-written IL. On Windows
this feature is commonly used by C++/CLI
+3. EH Correctness. Some helpers are written in assembly to provide well known
+ locations for NullReferenceExceptions to be generated out of a SIGSEGV
+ signal.
+
+ 1. `JIT_MemSet`, and `JIT_MemCpy` have this requirement
+
#### cgencpu.h
This header is included by various code in the VM directory. It provides a large
diff --git a/docs/design/coreclr/jit/first-class-structs.md b/docs/design/coreclr/jit/first-class-structs.md
index 4211f75ff745f8..dc017aee75f2e6 100644
--- a/docs/design/coreclr/jit/first-class-structs.md
+++ b/docs/design/coreclr/jit/first-class-structs.md
@@ -94,6 +94,10 @@ encountered by most phases of the JIT:
[#21705](https://github.com/dotnet/coreclr/pull/21705) they are no longer large nodes.
* `GT_STORE_OBJ` and `GT_STORE_BLK` have the same structure as `GT_OBJ` and `GT_BLK`, respectively
* `Data()` is op2
+ * `GT_STORE_DYN_BLK` (GenTreeStoreDynBlk extends GenTreeBlk)
+ * Additional child `gtDynamicSize`
+ * Note that these aren't really struct stores; they represent dynamically sized blocks
+ of arbitrary data.
* For `GT_LCL_FLD` nodes, we store a pointer to `ClassLayout` in the node.
* For `GT_LCL_VAR` nodes, the `ClassLayout` is obtained from the `LclVarDsc`.
diff --git a/docs/design/coreclr/jit/ryujit-overview.md b/docs/design/coreclr/jit/ryujit-overview.md
index 5e63d38e98f664..cdb17002ee1974 100644
--- a/docs/design/coreclr/jit/ryujit-overview.md
+++ b/docs/design/coreclr/jit/ryujit-overview.md
@@ -222,7 +222,6 @@ The top-level function of interest is `Compiler::compCompile`. It invokes the fo
| [Common Subexpression Elimination (CSE)](#cse) | Elimination of redundant subexressions based on value numbers. |
| [Assertion Propagation](#assertion-propagation) | Utilizes value numbers to propagate and transform based on properties such as non-nullness. |
| [Range analysis](#range-analysis) | Eliminate array index range checks based on value numbers and assertions |
-| [Induction variable optimization](#iv-opts) | Optimize induction variables used inside natural loops based on scalar evolution analysis |
| [VN-based dead store elimination](#vn-based-dead-store-elimination) | Eliminate stores that do not change the value of a local. |
| [If conversion](#if-conversion) | Transform conditional definitions into `GT_SELECT` operators. |
| [Rationalization](#rationalization) | Flowgraph order changes from `FGOrderTree` to `FGOrderLinear`. All `GT_COMMA` nodes are transformed. |
@@ -348,11 +347,6 @@ reused.
Utilizes value numbers to propagate and transform based on properties such as non-nullness.
-### Induction variable optimization
-
-Performs scalar evolution analysis and utilized it to optimize induction variables inside loops.
-Currently this entails IV widening which is done on x64 only.
-
### Range analysis
Optimize array index range checks based on value numbers and assertions.
diff --git a/docs/design/coreclr/jit/ryujit-tutorial.md b/docs/design/coreclr/jit/ryujit-tutorial.md
index ec900ccc8cd937..34466e45afbcdc 100644
--- a/docs/design/coreclr/jit/ryujit-tutorial.md
+++ b/docs/design/coreclr/jit/ryujit-tutorial.md
@@ -447,10 +447,6 @@ This is the same diagram as before, but with additional links to indicate execut
- Determine initial value for dependent phis
- Eliminate checks where the range of the index is within the check range
-### Induction Variable Optimization
-- Perform scalar evolution analysis to describe values of IR nodes inside loops
-- Perform IV widening on x64 to avoid unnecessary zero extensions for array/span indexing
-
## RyuJIT Back-End
### Rationalization
diff --git a/docs/design/features/byreflike-generics.md b/docs/design/features/byreflike-generics.md
index 99aa905ebf0f58..d644a25e7f3f22 100644
--- a/docs/design/features/byreflike-generics.md
+++ b/docs/design/features/byreflike-generics.md
@@ -127,129 +127,3 @@ The following are IL sequences involving the `box` instruction. They are used fo
`box` ; `isinst` ; `unbox.any` – The box, `isint`, and unbox target types are all equal.
`box` ; `isinst` ; `br_true/false` – The box target type is equal to the unboxed target type or the box target type is `Nullable` and target type equalities can be computed.
-
-## Examples
-
-Below are valid and invalid examples of ByRefLike as Generic parameters. All examples use the **not official** syntax, `allows ref struct`, for indicating the Generic permits ByRefLike types.
-
-**1) Valid**
-```csharp
-class A where T1: allows ref struct
-{
- public void M();
-}
-
-// The derived class is okay to lack the 'allows'
-// because the base permits non-ByRefLike (default)
-// _and_ ByRefLike types.
-class B : A
-{
- public void N()
- => M(); // Any T2 satisfies the constraints from A<>
-}
-```
-
-**2) Invalid**
-```csharp
-class A
-{
- public void M();
-}
-
-// The derived class cannot push up the allows
-// constraint for ByRefLike types.
-class B : A where T2: allows ref struct
-{
- public void N()
- => M(); // A<> may not permit a T2
-}
-```
-
-**3) Valid**
-```csharp
-interface IA
-{
- void M();
-}
-
-ref struct A : IA
-{
- public void M() { }
-}
-
-class B
-{
- // This call is permitted because no boxing is needed
- // to dispatch to the method - it is implemented on A.
- public static void C(T t) where T: IA, allows ref struct
- => t.M();
-}
-```
-
-**4) Invalid**
-```csharp
-interface IA
-{
- public void M() { }
-}
-
-ref struct A : IA
-{
- // Relies on IA::M() implementation.
-}
-
-class B
-{
- // Reliance on a DIM forces the generic parameter
- // to be boxed, which is invalid for ByRefLike types.
- public static void C(T t) where T: IA, allows ref struct
- => t.M();
-}
-```
-
-**5) Valid**
-```csharp
-class A where T1: allows ref struct
-{
-}
-
-class B
-{
- // The type parameter is okay to lack the 'allows'
- // because the field permits non-ByRefLike (default)
- // _and_ ByRefLike types.
- A Field;
-}
-```
-
-**6) Invalid**
-```csharp
-class A
-{
-}
-
-class B where T2: allows ref struct
-{
- // The type parameter can be passed to
- // the field type, but will fail if
- // T2 is a ByRefLike type.
- A Field;
-}
-```
-
-**7) Invalid**
-```csharp
-class A
-{
- virtual void M() where T1: allows ref struct;
-}
-
-class B : A
-{
- // Override methods need to match be at least
- // as restrictive with respect to constraints.
- // If a user has an instance of A, they are
- // not aware they could be calling B.
- override void M();
-}
-```
\ No newline at end of file
diff --git a/docs/design/features/globalization-icu-wasm.md b/docs/design/features/globalization-icu-wasm.md
index ed5c03e88aa20d..956807b30c5cd5 100644
--- a/docs/design/features/globalization-icu-wasm.md
+++ b/docs/design/features/globalization-icu-wasm.md
@@ -28,7 +28,7 @@ Removing specific feature data might result in an exception that starts with `[C
* For prerequisites run `.devcontainer/postCreateCommand.sh` (it is run automatically on creation if using Codespaces)
* Building:
```
- ./build.sh /p:TargetOS=Browser /p:TargetArchitecture=wasm
+ ./build.sh /p:TargetOS=Browser /p:TargetArchitecture=wasm /p:IcuTracing=true
```
Output is located in `artifacts/bin/icu-browser-wasm`.
@@ -45,7 +45,7 @@ Removing specific feature data might result in an exception that starts with `[C
```
* Building:
```bash
- ./build.sh /p:TargetOS=Android /p:TargetArchitecture=x64
+ ./build.sh /p:TargetOS=Android /p:TargetArchitecture=x64 /p:IcuTracing=true
```
Output from both builds will be located in subdirectories of `artifacts/bin`. Copy the generated `.dat` files to your project location and provide the path to it in the `.csproj`, e.g.:
diff --git a/docs/infra/automation.md b/docs/infra/automation.md
index a4ed601cf33e23..6b15e2a9171662 100644
--- a/docs/infra/automation.md
+++ b/docs/infra/automation.md
@@ -1,9 +1,13 @@
## Automation
-### Policy Service Bot
+### Fabric Bot
-This repository uses the Policy Service bot to automate issue and pull request management. All automation rules are defined in the [`.github/policies`](../../.github/policies) folder.
+This repository uses Fabric Bot to automate issue and pull request management. All automation rules are defined in the [`.github/fabricbot.json`](../../.github/fabricbot.json) file.
#### Notifications
-You are welcome to enable notifications for yourself for one or more areas. You will be tagged whenever there are new issues and PR's in the area. You do not need to have commit access for this. To add or remove notifications for yourself, please offer a PR that edits the "mentionees" value for that area in the policy YAML file.
+You are welcome to enable notifications for yourself for one or more areas. You will be tagged whenever there are new issues and PR's in the area. You do not need to have commit access for this. To add or remove notifications for yourself, please offer a PR that edits the "mentionees" value for that area. [Here is an example](https://github.com/dotnet/runtime/commit/c28b13f0cf4e2127a74285b65188413ca7e677d4).
+
+#### Other changes
+
+For any other changes, you will need access to the [`Fabric Bot portal`](https://portal.fabricbot.ms/bot/) which is only available to Microsoft employees at present. Ensure you are signed out from the portal, choose "Import Configuration" option and make changes using the editor. It's necessary to use the portal because there is at present no published JSON schema for the configuration format.
diff --git a/docs/project/list-of-diagnostics.md b/docs/project/list-of-diagnostics.md
index 9ad0f02f5f88bd..4cae3a85a87d8d 100644
--- a/docs/project/list-of-diagnostics.md
+++ b/docs/project/list-of-diagnostics.md
@@ -108,7 +108,6 @@ The PR that reveals the implementation of the ``/``/``
The `corflags.exe` tool that ships with the .NET Framework SDK can show whether a binary is delay-signed or strong-named. For a delay-signed assembly it may show:
diff --git a/docs/workflow/building/coreclr/nativeaot.md b/docs/workflow/building/coreclr/nativeaot.md
index a0f782aad4774b..31783578b9fde0 100644
--- a/docs/workflow/building/coreclr/nativeaot.md
+++ b/docs/workflow/building/coreclr/nativeaot.md
@@ -22,6 +22,8 @@ The Native AOT toolchain can be currently built for Linux (x64/arm64), macOS (x6
The paths to major components can be overridden using `IlcToolsPath`, `IlcSdkPath`, `IlcFrameworkPath`, `IlcFrameworkNativePath` and `IlcMibcPath` properties for `dotnet publish`. For example, `/p:IlcToolsPath=\artifacts\bin\coreclr\windows.x64.Debug\ilc` can be used to override the compiler with a local debug build for troubleshooting or quick iterations.
+The component that writes out object files (objwriter.dll/libobjwriter.so/libobjwriter.dylib) is based on LLVM and doesn't build in the runtime repo. It gets published as a NuGet package out of the [dotnet/llvm-project](https://github.com/dotnet/llvm-project) repo (branch [objwriter/12.x](https://github.com/dotnet/llvm-project/tree/objwriter/12.x)). If you're working on ObjWriter or bringing up a new platform that doesn't have ObjWriter packages yet, as additional pre-requisites you need to build objwriter out of that repo and replace the file in the output.
+
### Building packages
Run `build[.cmd|.sh] -c Release` from the repo root to build the NativeAOT toolchain packages. The build will place the toolchain packages at `artifacts\packages\Release\Shipping`. To publish your project using these packages:
diff --git a/docs/workflow/ci/failure-analysis.md b/docs/workflow/ci/failure-analysis.md
index 58a11c06bdfa4f..57917c841316a8 100644
--- a/docs/workflow/ci/failure-analysis.md
+++ b/docs/workflow/ci/failure-analysis.md
@@ -12,19 +12,6 @@
## Triaging errors seen in CI
-## Summary
-
-**Passing Build Analysis is required to merge into the runtime repo**.
-
-To resolve failures, do the following, in order:
-
-1. Fix the problem if your PR is the cause.
-2. For all failures not in the "Known test errors" section, [try to file a Known Build Error issue](#what-to-do-if-you-determine-the-failure-is-unrelated).
-3. If all else fails, perform a [manual bypass](#bypassing-build-analysis).
-
-
-## Details
-
In case of failure, any PR on the runtime will have a failed GitHub check - PR Build Analysis - which has a summary of all failures, including a list of matching known issues as well as any regressions introduced to the build or the tests. This tab should be your first stop for analyzing the PR failures.
![Build analysis check](analysis-check.png)
@@ -91,7 +78,6 @@ If you have considered all the diagnostic artifacts and determined the failure i
````
It already contains most of the essential information, but *it is very important that you fill out the json blob*.
- - You can now use the [Build Analysis Known Issue Helper](https://helix.dot.net/BuildAnalysis/CreateKnownIssues) to create an issue. It assists in adding the right set of labels, fill the necessary paths in the json blob, and it will validate that it matches the text presented for the issue found in the logs.
- You can add into the `ErrorMessage` field the string that you found uniquely identifies the issue. In case you need to use a regex, use the `ErrorPattern` field instead. This is a limited to a single-line, non-backtracking regex as described [here](https://github.com/dotnet/arcade/blob/main/Documentation/Projects/Build%20Analysis/KnownIssues.md#regex-matching). This regex also needs to be appropriately escaped. Check the [arcade known issues](https://github.com/dotnet/arcade/blob/main/Documentation/Projects/Build%20Analysis/KnownIssues.md#filling-out-known-issues-json-blob) documentation for a good guide on proper regex and JSON escaping.
- The field `ExcludeConsoleLog` describes if the execution logs should be considered on top of the individual test results. **For most cases, this should be set to `true` as the failure will happen within a single test**. Setting it to `false` will mean all failures within an xUnit set of tests will also get attributed to this particular error, since there's one log describing all the problems. Due to limitations in Known Issues around rate limiting and xUnit resiliency, setting `ExcludeConsoleLog=false` is necessary in two scenarios:
+ Nested tests as reported to Azure DevOps. Essentially this means theory failures, which look like this when reported in Azure DevOps: ![xUnit theory seen in azure devops](theory-azdo.png).
@@ -109,16 +95,6 @@ After you do this, if the failure is occurring frequently as per the data captur
There are plenty of intermittent failures that won't manifest again on a retry. Therefore these steps should be followed for every iteration of the PR build, e.g. before retrying/rebuilding.
-### Bypassing build analysis
-
-To unconditionally bypass the build analysis check (turn it green), you can add a comment to your PR with the following text:
-
-```
-/ba-g
-```
-
-For more information, see https://github.com/dotnet/arcade/blob/main/Documentation/Projects/Build%20Analysis/EscapeMechanismforBuildAnalysis.md
-
### Examples of Build Analysis
#### Good usage examples
diff --git a/docs/workflow/ci/triaging-failures.md b/docs/workflow/ci/triaging-failures.md
index bf5e80f7522e6b..1baa5605277475 100644
--- a/docs/workflow/ci/triaging-failures.md
+++ b/docs/workflow/ci/triaging-failures.md
@@ -8,7 +8,7 @@ stress mode test configuration failures, such as failures in a JIT stress test r
One goal of failure investigation is to quickly route failures to the correct area owner. The ownership of various product areas
is detailed [here](../../area-owners.md). The GitHub auto-tagging bot uses the ownership information
-in the file [Policy Service configuration](../../../.github/policies).
+in the file [fabricbot.json](../../../.github/fabricbot.json).
## Platform configuration
diff --git a/docs/workflow/debugging/coreclr/debugging-aot-compilers.md b/docs/workflow/debugging/coreclr/debugging-aot-compilers.md
index 7896e1b8bb5021..341e5489548e1d 100644
--- a/docs/workflow/debugging/coreclr/debugging-aot-compilers.md
+++ b/docs/workflow/debugging/coreclr/debugging-aot-compilers.md
@@ -85,7 +85,7 @@ The object files generated by the ILC compiler contain debug information for met
The ILC compiler typically compiles the whole program - it loosely corresponds to the composite mode of crossgen2. There is a multifile mode, where each managed assembly corresponds to a single object file, but this mode is not shipping.
-The supported object files generated by the ILC compiler are PE/ELF/Mach-O formats.
+The object files generated by the ILC compiler are written out using an LLVM-based object writer (consumed as a NuGet package built out of the dotnet/llvm-project repo, branch objwriter/12.x). The object writer uses the LLVM assembler APIs (APIs meant to be used by tools that convert textual assembly into machine code) to emit object files in PE/ELF/Mach-O formats.
## Example of debugging a test application in Crossgen2
diff --git a/docs/workflow/debugging/coreclr/debugging-runtime.md b/docs/workflow/debugging/coreclr/debugging-runtime.md
index 6edce3c7646e0a..dd92fe93cfaf2e 100644
--- a/docs/workflow/debugging/coreclr/debugging-runtime.md
+++ b/docs/workflow/debugging/coreclr/debugging-runtime.md
@@ -150,7 +150,7 @@ It might also be the case that you would need the latest changes in SOS, or you'
**NOTE**: Only `lldb` is supported to use with SOS. You can also use `gdb`, `cgdb`, or other debuggers, but you might not have access to SOS.
1. Perform a build of the _clr_ subset of the runtime repo.
-2. Start lldb passing `corerun`, the app to run (e.g. `HelloWorld.dll`), and any arguments this app might need: `lldb -- /path/to/corerun /path/to/app.dll `
+2. Start lldb passing `corerun`, the app to run (e.g. `HelloWorld.dll`), and any arguments this app might need: `lldb /path/to/corerun /path/to/app.dll `
3. If you're using the installed version of SOS, you can skip this step. If you built SOS manually, you have to load it before starting the debugging session: `plugin load /path/to/built/sos/libsosplugin.so`. Note that `.so` is for Linux, and `.dylib` is for macOS. You can find more information in the diagnostics repo [private sos build doc](https://github.com/dotnet/diagnostics/blob/main/documentation/using-sos-private-build.md).
4. Launch program: `process launch -s`
5. To stop breaks on _SIGUSR1_ signals used by the runtime run the following command: `process handle -s false SIGUSR1`
diff --git a/docs/workflow/debugging/mono/android-debugging.md b/docs/workflow/debugging/mono/android-debugging.md
index 7e86eb7753242b..918ac1503efa5e 100644
--- a/docs/workflow/debugging/mono/android-debugging.md
+++ b/docs/workflow/debugging/mono/android-debugging.md
@@ -57,27 +57,25 @@ Since you're debugging an optimized release build, it is likely the debugger wil
## Native debugging using a local debug build of Mono
-Ensure the prerequisites are met for [Testing Android](../../testing/libraries/testing-android.md#prerequisites).
+Build the runtime for your android architecture: `ANDROID_NDK_ROOT= ./build.sh --os android --arch x86 -c Debug`. See the instructions for [Testing Android](../../testing/libraries/testing-android.md) for details.
-Build the runtime for your android architecture `` and keep debug symbols in the binary:
-`./build.sh -s mono+libs -os android -arch -c Debug /p:KeepNativeSymbols=true`
-
-In the source code for the C# project, add the following to the .csproj (replacing `` by the appropriate location and `` with the built android architecture):
+In the source code for the C# project, add the following to the .csproj (replacing `` by the appropriate location):
```
-
```
-Then rebuild and reinstall the project, open the apk in Android Studio (File > Profile or Debug APK), and debug.
-
-Note: If debugging in Android Studio stops at signals `SIGPWR` and `SIGXCPU` during startup, configure LLDB to not stop the process for those signals via `process handle -p true -s false -n true SIGPWR` and `process handle -p true -s false -n true SIGXCPU` in Android Studio's LLDB tab.
+Then rebuild and reinstall the project, open the apk in Android Studio, and debug. The
+runtime native libraries will be stripped, so to make use of debug symbols, you
+will need to follow the steps above (rename `*.so.dbg` in the artifacts to
+`*.so.so` and add them to the APK project in Android Studio)
## Native and managed debugging or debugging the managed debugger
diff --git a/docs/workflow/testing/host/testing.md b/docs/workflow/testing/host/testing.md
index bb45307ecf46f3..35c7359c411a9a 100644
--- a/docs/workflow/testing/host/testing.md
+++ b/docs/workflow/testing/host/testing.md
@@ -77,11 +77,6 @@ The `category!=failing` is to respect the [filtering traits](../libraries/filter
### Visual Studio
The [Microsoft.DotNet.CoreSetup.sln](/src/installer/Microsoft.DotNet.CoreSetup.sln) can be used to run and debug host tests through Visual Studio. When using the solution, the product should have already been [built](#building-tests) and the [test context](#test-context) set up.
-If you built the runtime or libraries with a different configuration from the host, you have to specify this when starting visual studio:
-
-```console
-build.cmd -vs Microsoft.DotNet.CoreSetup -rc Release -lc Release
-```
### Preserving test artifacts
diff --git a/eng/CodeAnalysis.src.globalconfig b/eng/CodeAnalysis.src.globalconfig
index 307e910d1628e9..2677ac469e6671 100644
--- a/eng/CodeAnalysis.src.globalconfig
+++ b/eng/CodeAnalysis.src.globalconfig
@@ -274,12 +274,6 @@ dotnet_diagnostic.CA1512.severity = warning
# CA1513: Use ObjectDisposedException throw helper
dotnet_diagnostic.CA1513.severity = warning
-# CA1514: Avoid redundant length argument
-dotnet_diagnostic.CA1514.severity = warning
-
-# CA1515: Consider making public types internal
-dotnet_diagnostic.CA1515.severity = none
-
# CA1700: Do not name enum values 'Reserved'
dotnet_diagnostic.CA1700.severity = none
@@ -498,9 +492,6 @@ dotnet_diagnostic.CA1869.severity = warning
# CA1870: Use a cached 'SearchValues' instance
dotnet_diagnostic.CA1870.severity = warning
-# CA1871: Do not pass a nullable struct to 'ArgumentNullException.ThrowIfNull'
-dotnet_diagnostic.CA1871.severity = warning
-
# CA2000: Dispose objects before losing scope
dotnet_diagnostic.CA2000.severity = none
@@ -688,18 +679,6 @@ dotnet_diagnostic.CA2260.severity = warning
# CA2261: Do not use ConfigureAwaitOptions.SuppressThrowing with Task
dotnet_diagnostic.CA2261.severity = warning
-# CA2262: Set 'MaxResponseHeadersLength' properly
-dotnet_diagnostic.CA2262.severity = warning
-
-# CA2263: Prefer generic overload when type is known
-dotnet_diagnostic.CA2263.severity = info
-
-# CA2264: Do not pass a non-nullable value to 'ArgumentNullException.ThrowIfNull'
-dotnet_diagnostic.CA2264.severity = warning
-
-# CA2265: Do not compare Span to 'null' or 'default'
-dotnet_diagnostic.CA2265.severity = warning
-
# CA2300: Do not use insecure deserializer BinaryFormatter
dotnet_diagnostic.CA2300.severity = none
diff --git a/eng/CodeAnalysis.test.globalconfig b/eng/CodeAnalysis.test.globalconfig
index 3f58c3aab64ea9..79e35931782f56 100644
--- a/eng/CodeAnalysis.test.globalconfig
+++ b/eng/CodeAnalysis.test.globalconfig
@@ -273,12 +273,6 @@ dotnet_diagnostic.CA1512.severity = none
# CA1513: Use ObjectDisposedException throw helper
dotnet_diagnostic.CA1513.severity = none
-# CA1514: Avoid redundant length argument
-dotnet_diagnostic.CA1514.severity = none
-
-# CA1515: Consider making public types internal
-dotnet_diagnostic.CA1515.severity = none
-
# CA1700: Do not name enum values 'Reserved'
dotnet_diagnostic.CA1700.severity = none
@@ -495,9 +489,6 @@ dotnet_diagnostic.CA1869.severity = none
# CA1870: Use a cached 'SearchValues' instance
dotnet_diagnostic.CA1870.severity = none
-# CA1871: Do not pass a nullable struct to 'ArgumentNullException.ThrowIfNull'
-dotnet_diagnostic.CA1871.severity = none
-
# CA2000: Dispose objects before losing scope
dotnet_diagnostic.CA2000.severity = none
@@ -684,18 +675,6 @@ dotnet_diagnostic.CA2260.severity = none
# CA2261: Do not use ConfigureAwaitOptions.SuppressThrowing with Task
dotnet_diagnostic.CA2261.severity = none
-# CA2262: Set 'MaxResponseHeadersLength' properly
-dotnet_diagnostic.CA2262.severity = none
-
-# CA2263: Prefer generic overload when type is known
-dotnet_diagnostic.CA2263.severity = none
-
-# CA2264: Do not pass a non-nullable value to 'ArgumentNullException.ThrowIfNull'
-dotnet_diagnostic.CA2264.severity = none
-
-# CA2265: Do not compare Span to 'null' or 'default'
-dotnet_diagnostic.CA2265.severity = none
-
# CA2300: Do not use insecure deserializer BinaryFormatter
dotnet_diagnostic.CA2300.severity = none
diff --git a/eng/DotNetBuild.props b/eng/DotNetBuild.props
index c5da9ec68e4517..3c68bf354bae17 100644
--- a/eng/DotNetBuild.props
+++ b/eng/DotNetBuild.props
@@ -21,10 +21,6 @@
<_hostArch>$(_hostRid.Substring($(_hostRidPlatformIndex)).TrimStart('-'))
minimal
-
-
- true
@@ -47,7 +43,7 @@
Properties that control flags from the VMR build, and the expected output for the VMR build should be added to this file. -->
$(InnerBuildArgs) $(FlagParameterPrefix)arch $(TargetArch)
$(InnerBuildArgs) $(FlagParameterPrefix)os $(TargetOS)
- $(InnerBuildArgs) $(FlagParameterPrefix)cross
+ $(InnerBuildArgs) $(FlagParameterPrefix)cross
$(InnerBuildArgs) $(FlagParameterPrefix)configuration $(Configuration)
$(InnerBuildArgs) $(FlagParameterPrefix)allconfigurations
$(InnerBuildArgs) $(FlagParameterPrefix)verbosity $(LogVerbosity)
@@ -69,7 +65,6 @@
$(InnerBuildArgs) /p:DotNetBuildRepo=true
- $(InnerBuildArgs) /p:DotNetBuildOrchestrator=true
$(InnerBuildArgs) /p:OfficialBuildId=$(OfficialBuildId)
$(InnerBuildArgs) /p:ContinuousIntegrationBuild=$(ContinuousIntegrationBuild)
$(InnerBuildArgs) /p:PortableBuild=$(PortableBuild)
diff --git a/eng/Publishing.props b/eng/Publishing.props
index 8b796225f8274b..920e79cbbd2f7c 100644
--- a/eng/Publishing.props
+++ b/eng/Publishing.props
@@ -1,7 +1,6 @@
-
+
- true
+ 3
-
-
+
\ No newline at end of file
diff --git a/eng/Subsets.props b/eng/Subsets.props
index cbe9cfc7d73c99..dd284ea6d99776 100644
--- a/eng/Subsets.props
+++ b/eng/Subsets.props
@@ -119,9 +119,7 @@
- <_NativeAotSupportedOS Condition="'$(TargetOS)' == 'windows' or '$(TargetOS)' == 'linux' or '$(TargetOS)' == 'osx' or '$(TargetOS)' == 'maccatalyst' or '$(TargetOS)' == 'iossimulator' or '$(TargetOS)' == 'ios' or '$(TargetOS)' == 'tvossimulator' or '$(TargetOS)' == 'tvos' or '$(TargetOS)' == 'freebsd'">true
- <_NativeAotSupportedArch Condition="'$(TargetArchitecture)' == 'x64' or '$(TargetArchitecture)' == 'arm64' or '$(TargetArchitecture)' == 'arm' or ('$(TargetOS)' == 'windows' and '$(TargetArchitecture)' == 'x86')">true
- true
+ true
true
@@ -257,7 +255,7 @@
-
+
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 26ce2a8139000a..61fe9ade1dbb81 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -1,52 +1,84 @@
-
+
https://github.com/dotnet/icu
- 9712d1680642c09dfd46827f5e850b9027ba6086
+ 1b9c03a1103cf622ee2f7850d16aa1095a719e56
-
+
https://github.com/dotnet/msquic
- 6281631a8328ffdbb1b63b231af1aaa803915b23
+ 3fb2583170384341dbbc444cd5bb3d2319433fb6
https://github.com/dotnet/wcf
7f504aabb1988e9a093c1e74d8040bd52feb2f01
-
+
https://github.com/dotnet/emsdk
- 5dd0620274178dd73cac5049e5187c00e07ecf0c
+ 8afd92448d03a80001c9cac5f2acb53b336263a4
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
+
+
+ https://github.com/dotnet/llvm-project
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
+
+
+ https://github.com/dotnet/llvm-project
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
+
+
+ https://github.com/dotnet/llvm-project
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
+
+
+ https://github.com/dotnet/llvm-project
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
+
+
+ https://github.com/dotnet/llvm-project
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
+
+
+ https://github.com/dotnet/llvm-project
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
+
+
+ https://github.com/dotnet/llvm-project
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
+
+
+ https://github.com/dotnet/llvm-project
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
https://github.com/dotnet/command-line-api
@@ -58,307 +90,307 @@
a045dd54a4c44723c215d992288160eb1401bb7f
-
+
https://github.com/dotnet/cecil
- 896cafe8d0683f74a5ff3eff1f92b6b2ed3500f3
+ ca7e93445acbd94bfa696c16fa039f2a6130f2cb
-
+
https://github.com/dotnet/cecil
- 896cafe8d0683f74a5ff3eff1f92b6b2ed3500f3
+ ca7e93445acbd94bfa696c16fa039f2a6130f2cb
-
+
https://github.com/dotnet/emsdk
- 5dd0620274178dd73cac5049e5187c00e07ecf0c
+ 8afd92448d03a80001c9cac5f2acb53b336263a4
-
+
https://github.com/dotnet/emsdk
- 5dd0620274178dd73cac5049e5187c00e07ecf0c
+ 8afd92448d03a80001c9cac5f2acb53b336263a4
-
+
https://github.com/dotnet/source-build-reference-packages
- 768378e775fc5ddc99d41f2c4d1c78182f326ea7
+ 8ee50f75f960fbfb20fce0fefc5a3b05d15b1d21
-
+
https://github.com/dotnet/source-build-externals
- 472629e451a5a87410ea3670606f7235a4dd5a02
+ ddfb60463c966af55fd0e222c2266170e83d1324
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/llvm-project
- 3ac1a8b8d575b4bfe28f3cfb11a32589b6a05eca
+ 9885e5aecc176ca701fc3527877d608bf7ccfb7d
-
+
https://github.com/dotnet/runtime
- 2909fe8a13b361bd83727cef1af9360a0949ad2b
+ 963626276e11bf5587aaed69826b62682b05d9c4
-
+
https://github.com/dotnet/runtime
- 2909fe8a13b361bd83727cef1af9360a0949ad2b
+ 963626276e11bf5587aaed69826b62682b05d9c4
-
+
https://github.com/dotnet/runtime
- 2909fe8a13b361bd83727cef1af9360a0949ad2b
+ 963626276e11bf5587aaed69826b62682b05d9c4
-
+
https://github.com/dotnet/runtime
- 2909fe8a13b361bd83727cef1af9360a0949ad2b
+ 963626276e11bf5587aaed69826b62682b05d9c4
-
+
https://github.com/dotnet/runtime
- 2909fe8a13b361bd83727cef1af9360a0949ad2b
+ 963626276e11bf5587aaed69826b62682b05d9c4
-
+
https://github.com/dotnet/runtime
- 2909fe8a13b361bd83727cef1af9360a0949ad2b
+ 963626276e11bf5587aaed69826b62682b05d9c4
-
+
https://github.com/dotnet/runtime
- 2909fe8a13b361bd83727cef1af9360a0949ad2b
+ 963626276e11bf5587aaed69826b62682b05d9c4
-
+
https://github.com/dotnet/runtime
- 2909fe8a13b361bd83727cef1af9360a0949ad2b
+ 963626276e11bf5587aaed69826b62682b05d9c4
-
+
https://github.com/dotnet/runtime
- 2909fe8a13b361bd83727cef1af9360a0949ad2b
+ 963626276e11bf5587aaed69826b62682b05d9c4
-
+
https://github.com/dotnet/xharness
- 134035492ed8154fc9c5a930a4ca52c422b21afb
+ 11ae3663fe3de366ea3566d7ae9b4731adee2ca3
-
+
https://github.com/dotnet/xharness
- 134035492ed8154fc9c5a930a4ca52c422b21afb
+ 11ae3663fe3de366ea3566d7ae9b4731adee2ca3
-
+
https://github.com/dotnet/xharness
- 134035492ed8154fc9c5a930a4ca52c422b21afb
+ 11ae3663fe3de366ea3566d7ae9b4731adee2ca3
-
+
https://github.com/dotnet/arcade
- 39839f3007d9f3bbabf7a4b6a96ef5dd6be9e5ac
+ c3f5cbfb2829795294f5c2d9fa5a0522f47e91fb
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ db9f1c2362565f3ef41c8e8feb5ed49ab11a6459
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ db9f1c2362565f3ef41c8e8feb5ed49ab11a6459
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ db9f1c2362565f3ef41c8e8feb5ed49ab11a6459
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ db9f1c2362565f3ef41c8e8feb5ed49ab11a6459
-
+
https://github.com/dotnet/hotreload-utils
- afafaea82e66771c6c4e874dc9560c06adad4a13
+ 465874b5842702bf69bbb6bacd94a52d8ea2a073
-
+
https://github.com/dotnet/runtime-assets
- fe7f6de587d1867b71a8e846ffee3b2035afbfe5
+ b5ac2d9031d4b2dc40683b31de86b05a20b670af
https://github.com/dotnet/roslyn
@@ -367,18 +399,19 @@
https://github.com/dotnet/roslyn
77372c66fd54927312b5b0a2e399e192f74445c9
+
https://github.com/dotnet/roslyn
77372c66fd54927312b5b0a2e399e192f74445c9
-
+
https://github.com/dotnet/roslyn-analyzers
- 94749ce487be31b74bae5629b5af5d2392377f6d
+ 68c643b4667c6808bd21910ef32f7e2f7bd776c5
-
+
https://github.com/dotnet/roslyn-analyzers
- 94749ce487be31b74bae5629b5af5d2392377f6d
+ 68c643b4667c6808bd21910ef32f7e2f7bd776c5
@@ -386,23 +419,23 @@
77372c66fd54927312b5b0a2e399e192f74445c9
-
+
https://github.com/dotnet/sdk
- 1aed5c88d3f91dd1610cd60b03782a82a6a07e81
+ 0962c1f89f5daf924a9fe876c80e80b0bde63b0d
-
+
https://github.com/dotnet/sdk
- 1aed5c88d3f91dd1610cd60b03782a82a6a07e81
+ 0962c1f89f5daf924a9fe876c80e80b0bde63b0d
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ db9f1c2362565f3ef41c8e8feb5ed49ab11a6459
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
- 78a5b978e1965c1335edb4b9a22bc4d6ff5a77a6
+ db9f1c2362565f3ef41c8e8feb5ed49ab11a6459
@@ -410,9 +443,9 @@
https://github.com/NuGet/NuGet.Client
8fef55f5a55a3b4f2c96cd1a9b5ddc51d4b927f8
-
+
https://github.com/dotnet/installer
- e911f5c82cc02aea96e227596e16c830d54cf03a
+ ab44b49a395ac6a524628cdd8c5d99614248c584
diff --git a/eng/Versions.props b/eng/Versions.props
index e32b450af54776..bb1645abaf2b4c 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -7,11 +7,11 @@
0
0
9.0.100
- 8.0.2
+ 8.0.0
7.0.$([MSBuild]::Add($([System.Version]::Parse('$(PackageVersionNet8)').Build),14))
6.0.$([MSBuild]::Add($([System.Version]::Parse('$(PackageVersionNet7)').Build),11))
preview
- 3
+ 2
false
release
@@ -34,8 +34,8 @@
- 3.11.0-beta1.24158.2
- 9.0.0-preview.24158.2
+ 3.11.0-beta1.24109.1
+ 9.0.0-preview.24109.1
- 9.0.100-preview.3.24161.5
+ 9.0.100-preview.2.24112.1
- 9.0.0-beta.24161.5
- 9.0.0-beta.24161.5
- 9.0.0-beta.24161.5
- 9.0.0-beta.24161.5
- 2.6.7-beta.24161.5
- 9.0.0-beta.24161.5
- 2.6.7-beta.24161.5
- 9.0.0-beta.24161.5
- 9.0.0-beta.24161.5
- 9.0.0-beta.24161.5
- 9.0.0-beta.24161.5
- 9.0.0-beta.24161.5
- 9.0.0-beta.24161.5
- 9.0.0-beta.24161.5
- 9.0.0-beta.24161.5
- 9.0.0-beta.24161.5
+ 9.0.0-beta.24112.1
+ 9.0.0-beta.24112.1
+ 9.0.0-beta.24112.1
+ 9.0.0-beta.24112.1
+ 2.6.7-beta.24112.1
+ 9.0.0-beta.24112.1
+ 2.6.7-beta.24112.1
+ 9.0.0-beta.24112.1
+ 9.0.0-beta.24112.1
+ 9.0.0-beta.24112.1
+ 9.0.0-beta.24112.1
+ 9.0.0-beta.24112.1
+ 9.0.0-beta.24112.1
+ 9.0.0-beta.24112.1
+ 9.0.0-beta.24112.1
+ 9.0.0-beta.24112.1
1.4.0
6.0.0-preview.1.102
- 9.0.0-preview.3.24161.1
+ 9.0.0-preview.2.24115.1
6.0.0
- 9.0.0-preview.3.24161.1
+ 9.0.0-preview.2.24115.1
+
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
6.0.0
1.1.1
@@ -119,45 +128,45 @@
8.0.0
5.0.0
4.5.5
- 9.0.0-preview.3.24161.1
- 9.0.0-preview.3.24161.1
+ 9.0.0-preview.2.24115.1
+ 9.0.0-preview.2.24115.1
6.0.0
5.0.0
5.0.0
5.0.0
7.0.0
- 9.0.0-preview.3.24161.1
+ 9.0.0-preview.2.24115.1
6.0.0
7.0.0
4.5.4
4.5.0
- 9.0.0-preview.3.24161.1
+ 9.0.0-preview.2.24115.1
8.0.0
8.0.0
8.0.0
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
- 9.0.0-beta.24161.1
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
+ 9.0.0-beta.24108.2
- 1.0.0-prerelease.24106.4
- 1.0.0-prerelease.24106.4
- 1.0.0-prerelease.24106.4
- 1.0.0-prerelease.24106.4
- 1.0.0-prerelease.24106.4
- 1.0.0-prerelease.24106.4
+ 1.0.0-prerelease.24104.2
+ 1.0.0-prerelease.24104.2
+ 1.0.0-prerelease.24104.2
+ 1.0.0-prerelease.24104.2
+ 1.0.0-prerelease.24104.2
+ 1.0.0-prerelease.24104.2
2.0.0
17.8.0-beta1.23475.2
@@ -178,10 +187,10 @@
1.4.0
17.4.0-preview-20220707-01
- 9.0.0-prerelease.24161.1
- 9.0.0-prerelease.24161.1
- 9.0.0-prerelease.24161.1
- 9.0.0-alpha.0.24161.1
+ 9.0.0-prerelease.24112.4
+ 9.0.0-prerelease.24112.4
+ 9.0.0-prerelease.24112.4
+ 9.0.0-alpha.0.24112.1
3.12.0
4.5.0
6.0.0
@@ -207,53 +216,53 @@
8.0.0-preview-20230918.1
- 0.11.4-alpha.24161.3
+ 0.11.4-alpha.24112.1
- 9.0.0-preview.3.24161.1
+ 9.0.0-preview.2.24115.1
- 9.0.0-preview.3.24155.1
+ 9.0.0-preview.2.24112.1
- 2.3.5
- 9.0.0-alpha.1.24162.1
+ 2.2.3
+ 9.0.0-alpha.1.24067.1
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
- 9.0.0-preview.3.24160.1
+ 9.0.0-preview.2.24114.5
$(MicrosoftNETWorkloadEmscriptenCurrentManifest90100TransportVersion)
- 9.0.0-preview.3.24160.1
+ 9.0.0-preview.2.24114.5
1.1.87-gba258badda
1.0.0-v3.14.0.5722
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
- 16.0.5-alpha.1.24154.3
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
+ 16.0.5-alpha.1.24112.1
3.1.7
1.0.406601
- 9.0.100-preview.3.24161.2
+ 9.0.100-preview.2.24112.1
$(MicrosoftDotnetSdkInternalVersion)
diff --git a/eng/build-analysis-configuration.json b/eng/build-analysis-configuration.json
index d647594ab402ae..7f532220c1001f 100644
--- a/eng/build-analysis-configuration.json
+++ b/eng/build-analysis-configuration.json
@@ -3,10 +3,6 @@
{
"PipelineId": 129,
"PipelineName": "runtime"
- },
- {
- "PipelineId": 133,
- "PipelineName": "runtime-dev-innerloop"
}
]
}
diff --git a/eng/build.ps1 b/eng/build.ps1
index 474e55e40f2e58..db18267f33e1c5 100644
--- a/eng/build.ps1
+++ b/eng/build.ps1
@@ -261,12 +261,6 @@ if ($vs) {
$env:RUNTIMECONFIGURATION=$runtimeConfiguration
}
- if ($librariesConfiguration)
- {
- # Respect the LibrariesConfiguration variable for building inside VS with different libraries configurations
- $env:LIBRARIESCONFIGURATION=$librariesConfiguration
- }
-
# Respect the RuntimeFlavor variable for building inside VS with a different CoreLib and runtime
if ($runtimeFlavor)
{
@@ -331,9 +325,6 @@ if ($env:TreatWarningsAsErrors -eq 'false') {
$arguments += " -warnAsError 0"
}
-# disable terminal logger for now: https://github.com/dotnet/runtime/issues/97211
-$arguments += " /tl:false"
-
# Disable targeting pack caching as we reference a partially constructed targeting pack and update it later.
# The later changes are ignored when using the cache.
$env:DOTNETSDK_ALLOW_TARGETING_PACK_CACHING=0
diff --git a/eng/build.sh b/eng/build.sh
index 75fe2cdc39c5d0..67f3cfeea47278 100755
--- a/eng/build.sh
+++ b/eng/build.sh
@@ -553,9 +553,6 @@ if [[ "${TreatWarningsAsErrors:-}" == "false" ]]; then
arguments="$arguments -warnAsError 0"
fi
-# disable terminal logger for now: https://github.com/dotnet/runtime/issues/97211
-arguments="$arguments -tl:false"
-
initDistroRid "$os" "$arch" "$crossBuild"
# Disable targeting pack caching as we reference a partially constructed targeting pack and update it later.
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
index efa2fd72bfaa22..6c65e81925f2a3 100644
--- a/eng/common/SetupNugetSources.ps1
+++ b/eng/common/SetupNugetSources.ps1
@@ -35,7 +35,7 @@ Set-StrictMode -Version 2.0
. $PSScriptRoot\tools.ps1
# Add source entry to PackageSources
-function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) {
+function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $Password) {
$packageSource = $sources.SelectSingleNode("add[@key='$SourceName']")
if ($packageSource -eq $null)
@@ -48,11 +48,12 @@ function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Usern
else {
Write-Host "Package source $SourceName already present."
}
- AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd
+
+ AddCredential -Creds $creds -Source $SourceName -Username $Username -Password $Password
}
# Add a credential node for the specified source
-function AddCredential($creds, $source, $username, $pwd) {
+function AddCredential($creds, $source, $username, $password) {
# Looks for credential configuration for the given SourceName. Create it if none is found.
$sourceElement = $creds.SelectSingleNode($Source)
if ($sourceElement -eq $null)
@@ -81,18 +82,17 @@ function AddCredential($creds, $source, $username, $pwd) {
$passwordElement.SetAttribute("key", "ClearTextPassword")
$sourceElement.AppendChild($passwordElement) | Out-Null
}
-
- $passwordElement.SetAttribute("value", $pwd)
+ $passwordElement.SetAttribute("value", $Password)
}
-function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $pwd) {
+function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $Password) {
$maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]")
Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds."
ForEach ($PackageSource in $maestroPrivateSources) {
Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key
- AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -pwd $pwd
+ AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -Password $Password
}
}
@@ -144,13 +144,13 @@ if ($disabledSources -ne $null) {
$userName = "dn-bot"
# Insert credential nodes for Maestro's private feeds
-InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -pwd $Password
+InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password
# 3.1 uses a different feed url format so it's handled differently here
$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
if ($dotnet31Source -ne $null) {
- AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password
- AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
+ AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
+ AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}
$dotnetVersions = @('5','6','7','8')
@@ -159,9 +159,9 @@ foreach ($dotnetVersion in $dotnetVersions) {
$feedPrefix = "dotnet" + $dotnetVersion;
$dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']")
if ($dotnetSource -ne $null) {
- AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password
- AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
+ AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
+ AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
}
}
-$doc.Save($filename)
\ No newline at end of file
+$doc.Save($filename)
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
index 83e6d82e027a82..510458eb35b84b 100644
--- a/eng/common/build.ps1
+++ b/eng/common/build.ps1
@@ -125,6 +125,7 @@ function Build {
/p:Test=$test `
/p:Pack=$pack `
/p:DotNetBuildRepo=$($productBuild -or $verticalBuild) `
+ /p:ArcadeBuildVertical=$verticalBuild `
/p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `
diff --git a/eng/common/build.sh b/eng/common/build.sh
index d82ebf7428080f..bec7d02594f668 100755
--- a/eng/common/build.sh
+++ b/eng/common/build.sh
@@ -241,6 +241,7 @@ function Build {
/p:DotNetBuildRepo=$product_build \
/p:ArcadeBuildFromSource=$source_build \
/p:DotNetBuildSourceOnly=$source_build \
+ /p:ArcadeBuildVertical=$vertical_build \
/p:Rebuild=$rebuild \
/p:Test=$test \
/p:Pack=$pack \
diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh
index 5dcbfd700f0362..de1687b2ccbe79 100644
--- a/eng/common/native/init-distro-rid.sh
+++ b/eng/common/native/init-distro-rid.sh
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/usr/bin/env bash
# getNonPortableDistroRid
#
@@ -11,20 +11,21 @@
# non-portable rid
getNonPortableDistroRid()
{
- targetOs="$1"
- targetArch="$2"
- rootfsDir="$3"
- nonPortableRid=""
+ local targetOs="$1"
+ local targetArch="$2"
+ local rootfsDir="$3"
+ local nonPortableRid=""
if [ "$targetOs" = "linux" ]; then
- # shellcheck disable=SC1091
if [ -e "${rootfsDir}/etc/os-release" ]; then
- . "${rootfsDir}/etc/os-release"
- if [ "${ID}" = "rhel" ] || [ "${ID}" = "rocky" ] || [ "${ID}" = "alpine" ]; then
- VERSION_ID="${VERSION_ID%.*}" # Remove the last version digit for these distros
+ source "${rootfsDir}/etc/os-release"
+
+ if [[ "${ID}" == "rhel" || "${ID}" == "rocky" || "${ID}" == "alpine" ]]; then
+ # remove the last version digit
+ VERSION_ID="${VERSION_ID%.*}"
fi
- if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then
+ if [[ "${VERSION_ID:-}" =~ ^([[:digit:]]|\.)+$ ]]; then
nonPortableRid="${ID}.${VERSION_ID}-${targetArch}"
else
# Rolling release distros either do not set VERSION_ID, set it as blank or
@@ -32,45 +33,45 @@ getNonPortableDistroRid()
# so omit it here to be consistent with everything else.
nonPortableRid="${ID}-${targetArch}"
fi
+
elif [ -e "${rootfsDir}/android_platform" ]; then
- # shellcheck disable=SC1091
- . "${rootfsDir}/android_platform"
+ source "$rootfsDir"/android_platform
nonPortableRid="$RID"
fi
fi
if [ "$targetOs" = "freebsd" ]; then
- # $rootfsDir can be empty. freebsd-version is a shell script and should always work.
- __freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' -f1)
+ # $rootfsDir can be empty. freebsd-version is shell script and it should always work.
+ __freebsd_major_version=$($rootfsDir/bin/freebsd-version | { read v; echo "${v%%.*}"; })
nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}"
- elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then
+ elif command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then
__android_sdk_version=$(getprop ro.build.version.sdk)
nonPortableRid="android.$__android_sdk_version-${targetArch}"
elif [ "$targetOs" = "illumos" ]; then
__uname_version=$(uname -v)
case "$__uname_version" in
omnios-*)
- __omnios_major_version=$(echo "$__uname_version" | cut -c9-10)
- nonPortableRid="omnios.$__omnios_major_version-${targetArch}"
- ;;
+ __omnios_major_version=$(echo "${__uname_version:8:2}")
+ nonPortableRid=omnios."$__omnios_major_version"-"$targetArch"
+ ;;
joyent_*)
- __smartos_major_version=$(echo "$__uname_version" | cut -c9-10)
- nonPortableRid="smartos.$__smartos_major_version-${targetArch}"
- ;;
- *)
- nonPortableRid="illumos-${targetArch}"
- ;;
+ __smartos_major_version=$(echo "${__uname_version:7:4}")
+ nonPortableRid=smartos."$__smartos_major_version"-"$targetArch"
+ ;;
+ illumos_*)
+ nonPortableRid=openindiana-"$targetArch"
+ ;;
esac
elif [ "$targetOs" = "solaris" ]; then
__uname_version=$(uname -v)
- __solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1)
- nonPortableRid="solaris.$__solaris_major_version-${targetArch}"
+ __solaris_major_version=$(echo "${__uname_version%.*}")
+ nonPortableRid=solaris."$__solaris_major_version"-"$targetArch"
elif [ "$targetOs" = "haiku" ]; then
- __uname_release="$(uname -r)"
+ __uname_release=$(uname -r)
nonPortableRid=haiku.r"$__uname_release"-"$targetArch"
fi
- echo "$nonPortableRid" | tr '[:upper:]' '[:lower:]'
+ echo "$(echo $nonPortableRid | tr '[:upper:]' '[:lower:]')"
}
# initDistroRidGlobal
@@ -84,23 +85,26 @@ getNonPortableDistroRid()
# None
#
# Notes:
-# It is important to note that the function does not return anything, but it
-# exports the following variables on success:
-# __DistroRid : Non-portable rid of the target platform.
-# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
+#
+# It is important to note that the function does not return anything, but it
+# exports the following variables on success:
+#
+# __DistroRid : Non-portable rid of the target platform.
+# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform.
+#
initDistroRidGlobal()
{
- targetOs="$1"
- targetArch="$2"
- rootfsDir=""
- if [ $# -ge 3 ]; then
+ local targetOs="$1"
+ local targetArch="$2"
+ local rootfsDir=""
+ if [ "$#" -ge 3 ]; then
rootfsDir="$3"
fi
if [ -n "${rootfsDir}" ]; then
# We may have a cross build. Check for the existence of the rootfsDir
if [ ! -e "${rootfsDir}" ]; then
- echo "Error: rootfsDir has been passed, but the location is not valid."
+ echo "Error rootfsDir has been passed, but the location is not valid."
exit 1
fi
fi
@@ -115,7 +119,7 @@ initDistroRidGlobal()
STRINGS="$(command -v llvm-strings || true)"
fi
- # Check for musl-based distros (e.g. Alpine Linux, Void Linux).
+ # Check for musl-based distros (e.g Alpine Linux, Void Linux).
if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl ||
( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then
__PortableTargetOS="linux-musl"
diff --git a/eng/common/native/init-os-and-arch.sh b/eng/common/native/init-os-and-arch.sh
index 38921d4338f744..caa448ff030070 100644
--- a/eng/common/native/init-os-and-arch.sh
+++ b/eng/common/native/init-os-and-arch.sh
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/usr/bin/env bash
# Use uname to determine what the OS is.
OSName=$(uname -s | tr '[:upper:]' '[:lower:]')
@@ -54,7 +54,6 @@ case "$CPUName" in
;;
armv7l|armv8l)
- # shellcheck disable=SC1091
if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then
arch=armel
else
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml
deleted file mode 100644
index dfc3c0cf07ceae..00000000000000
--- a/eng/common/templates-official/job/job.yml
+++ /dev/null
@@ -1,263 +0,0 @@
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
-parameters:
-# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- cancelTimeoutInMinutes: ''
- condition: ''
- container: ''
- continueOnError: false
- dependsOn: ''
- displayName: ''
- pool: ''
- steps: []
- strategy: ''
- timeoutInMinutes: ''
- variables: []
- workspace: ''
- templateContext: ''
-
-# Job base template specific parameters
- # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
- artifacts: ''
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishBuildAssets: false
- enablePublishTestResults: false
- enablePublishUsingPipelines: false
- enableBuildRetry: false
- disableComponentGovernance: ''
- componentGovernanceIgnoreDirectories: ''
- mergeTestResults: false
- testRunTitle: ''
- testResultsFormat: ''
- name: ''
- preSteps: []
- runAsPublic: false
-# Sbom related params
- enableSbom: true
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
-
-jobs:
-- job: ${{ parameters.name }}
-
- ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
- cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
-
- ${{ if ne(parameters.condition, '') }}:
- condition: ${{ parameters.condition }}
-
- ${{ if ne(parameters.container, '') }}:
- container: ${{ parameters.container }}
-
- ${{ if ne(parameters.continueOnError, '') }}:
- continueOnError: ${{ parameters.continueOnError }}
-
- ${{ if ne(parameters.dependsOn, '') }}:
- dependsOn: ${{ parameters.dependsOn }}
-
- ${{ if ne(parameters.displayName, '') }}:
- displayName: ${{ parameters.displayName }}
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
-
- ${{ if ne(parameters.strategy, '') }}:
- strategy: ${{ parameters.strategy }}
-
- ${{ if ne(parameters.timeoutInMinutes, '') }}:
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
-
- ${{ if ne(parameters.templateContext, '') }}:
- templateContext: ${{ parameters.templateContext }}
-
- variables:
- - ${{ if ne(parameters.enableTelemetry, 'false') }}:
- - name: DOTNET_CLI_TELEMETRY_PROFILE
- value: '$(Build.Repository.Uri)'
- - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- - name: EnableRichCodeNavigation
- value: 'true'
- # Retry signature validation up to three times, waiting 2 seconds between attempts.
- # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
- value: 3,2000
- - ${{ each variable in parameters.variables }}:
- # handle name-value variable syntax
- # example:
- # - name: [key]
- # value: [value]
- - ${{ if ne(variable.name, '') }}:
- - name: ${{ variable.name }}
- value: ${{ variable.value }}
-
- # handle variable groups
- - ${{ if ne(variable.group, '') }}:
- - group: ${{ variable.group }}
-
- # handle template variable syntax
- # example:
- # - template: path/to/template.yml
- # parameters:
- # [key]: [value]
- - ${{ if ne(variable.template, '') }}:
- - template: ${{ variable.template }}
- ${{ if ne(variable.parameters, '') }}:
- parameters: ${{ variable.parameters }}
-
- # handle key-value variable syntax.
- # example:
- # - [key]: [value]
- - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
- - ${{ each pair in variable }}:
- - name: ${{ pair.key }}
- value: ${{ pair.value }}
-
- # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
- - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: DotNet-HelixApi-Access
-
- ${{ if ne(parameters.workspace, '') }}:
- workspace: ${{ parameters.workspace }}
-
- steps:
- - ${{ if ne(parameters.preSteps, '') }}:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - task: MicroBuildSigningPlugin@3
- displayName: Install MicroBuild plugin
- inputs:
- signType: $(_SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- env:
- TeamName: $(_TeamName)
- continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- - task: NuGetAuthenticate@1
-
- - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
- - task: DownloadPipelineArtifact@2
- inputs:
- buildType: current
- artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
- targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
- itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
-
- - ${{ each step in parameters.steps }}:
- - ${{ step }}
-
- - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- - task: RichCodeNavIndexer@0
- displayName: RichCodeNav Upload
- inputs:
- languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
- richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
- uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
- continueOnError: true
-
- - template: /eng/common/templates-official/steps/component-governance.yml
- parameters:
- ${{ if eq(parameters.disableComponentGovernance, '') }}:
- ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
- disableComponentGovernance: false
- ${{ else }}:
- disableComponentGovernance: true
- ${{ else }}:
- disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
- condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- env:
- TeamName: $(_TeamName)
-
- - ${{ if ne(parameters.artifacts.publish, '') }}:
- - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
- - task: CopyFiles@2
- displayName: Gather binaries for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/bin'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
- - task: CopyFiles@2
- displayName: Gather packages for publish to artifacts
- inputs:
- SourceFolder: 'artifacts/packages'
- Contents: '**'
- TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish pipeline artifacts
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
- continueOnError: true
- condition: always()
- - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- - task: 1ES.PublishPipelineArtifact@1
- inputs:
- targetPath: 'artifacts/log'
- artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
- displayName: 'Publish logs'
- continueOnError: true
- condition: always()
-
- - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
- PublishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
- continueOnError: true
- condition: always()
-
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
- - task: PublishTestResults@2
- displayName: Publish XUnit Test Results
- inputs:
- testResultsFormat: 'xUnit'
- testResultsFiles: '*.xml'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
- - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
- - task: PublishTestResults@2
- displayName: Publish TRX Test Results
- inputs:
- testResultsFormat: 'VSTest'
- testResultsFiles: '*.trx'
- searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
- testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
- mergeTestResults: ${{ parameters.mergeTestResults }}
- continueOnError: true
- condition: always()
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
- - template: /eng/common/templates-official/steps/generate-sbom.yml
- parameters:
- PackageVersion: ${{ parameters.packageVersion}}
- BuildDropPath: ${{ parameters.buildDropPath }}
- IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
-
- - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
- - task: 1ES.PublishPipelineArtifact@1
- inputs:
- targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
- artifactName: 'BuildConfiguration'
- displayName: 'Publish build retry configuration'
- continueOnError: true
\ No newline at end of file
diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml
deleted file mode 100644
index ba9ba49303292a..00000000000000
--- a/eng/common/templates-official/job/onelocbuild.yml
+++ /dev/null
@@ -1,112 +0,0 @@
-parameters:
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: ''
-
- CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
- GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
-
- SourcesDirectory: $(Build.SourcesDirectory)
- CreatePr: true
- AutoCompletePr: false
- ReusePr: true
- UseLfLineEndings: true
- UseCheckedInLocProjectJson: false
- SkipLocProjectJsonGeneration: false
- LanguageSet: VS_Main_Languages
- LclSource: lclFilesInRepo
- LclPackageId: ''
- RepoType: gitHub
- GitHubOrg: dotnet
- MirrorRepo: ''
- MirrorBranch: main
- condition: ''
- JobNameSuffix: ''
-
-jobs:
-- job: OneLocBuild${{ parameters.JobNameSuffix }}
-
- dependsOn: ${{ parameters.dependsOn }}
-
- displayName: OneLocBuild${{ parameters.JobNameSuffix }}
-
- variables:
- - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
- - name: _GenerateLocProjectArguments
- value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
- -LanguageSet "${{ parameters.LanguageSet }}"
- -CreateNeutralXlfs
- - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
- - name: _GenerateLocProjectArguments
- value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
- - template: /eng/common/templates-official/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022-pt
- os: windows
-
- steps:
- - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
- - task: Powershell@2
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
- arguments: $(_GenerateLocProjectArguments)
- displayName: Generate LocProject.json
- condition: ${{ parameters.condition }}
-
- - task: OneLocBuild@2
- displayName: OneLocBuild
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- inputs:
- locProj: eng/Localize/LocProject.json
- outDir: $(Build.ArtifactStagingDirectory)
- lclSource: ${{ parameters.LclSource }}
- lclPackageId: ${{ parameters.LclPackageId }}
- isCreatePrSelected: ${{ parameters.CreatePr }}
- isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
- ${{ if eq(parameters.CreatePr, true) }}:
- isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- isShouldReusePrSelected: ${{ parameters.ReusePr }}
- packageSourceAuth: patAuth
- patVariable: ${{ parameters.CeapexPat }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- repoType: ${{ parameters.RepoType }}
- gitHubPatVariable: "${{ parameters.GithubPat }}"
- ${{ if ne(parameters.MirrorRepo, '') }}:
- isMirrorRepoSelected: true
- gitHubOrganization: ${{ parameters.GitHubOrg }}
- mirrorRepo: ${{ parameters.MirrorRepo }}
- mirrorBranch: ${{ parameters.MirrorBranch }}
- condition: ${{ parameters.condition }}
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Localization Files
- inputs:
- PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish LocProject.json
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
- PublishLocation: Container
- ArtifactName: Loc
- condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml
deleted file mode 100644
index 5f54135569b290..00000000000000
--- a/eng/common/templates-official/job/publish-build-assets.yml
+++ /dev/null
@@ -1,157 +0,0 @@
-parameters:
- configuration: 'Debug'
-
- # Optional: condition for the job to run
- condition: ''
-
- # Optional: 'true' if future jobs should run even if this job fails
- continueOnError: false
-
- # Optional: dependencies of the job
- dependsOn: ''
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
- pool: {}
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishUsingPipelines: false
-
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishAssetsImmediately: false
-
- artifactsPublishingAdditionalParameters: ''
-
- signingValidationAdditionalParameters: ''
-
-jobs:
-- job: Asset_Registry_Publish
-
- dependsOn: ${{ parameters.dependsOn }}
- timeoutInMinutes: 150
-
- ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- displayName: Publish Assets
- ${{ else }}:
- displayName: Publish to Build Asset Registry
-
- variables:
- - template: /eng/common/templates-official/variables/pool-providers.yml
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: Publish-Build-Assets
- - group: AzureDevOps-Artifact-Feeds-Pats
- - name: runCodesignValidationInjection
- value: false
- # unconditional - needed for logs publishing (redactor tool version)
- - template: /eng/common/templates-official/post-build/common-variables.yml
-
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022-pt
- os: windows
- steps:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - checkout: self
- fetchDepth: 3
- clean: true
-
- - task: DownloadBuildArtifacts@0
- displayName: Download artifact
- inputs:
- artifactName: AssetManifests
- downloadPath: '$(Build.StagingDirectory)/Download'
- checkDownloadedFiles: true
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: NuGetAuthenticate@1
-
- - task: PowerShell@2
- displayName: Publish Build Assets
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
- /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
- /p:BuildAssetRegistryToken=$(MaestroAccessToken)
- /p:MaestroApiEndpoint=https://maestro.dot.net
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
- /p:OfficialBuildId=$(Build.BuildNumber)
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
-
- - task: powershell@2
- displayName: Create ReleaseConfigs Artifact
- inputs:
- targetType: inline
- script: |
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
- Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish ReleaseConfigs Artifact
- inputs:
- PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - task: powershell@2
- displayName: Check if SymbolPublishingExclusionsFile.txt exists
- inputs:
- targetType: inline
- script: |
- $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
- if(Test-Path -Path $symbolExclusionfile)
- {
- Write-Host "SymbolExclusionFile exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
- }
- else{
- Write-Host "Symbols Exclusion file does not exists"
- Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
- }
-
- - task: 1ES.PublishBuildArtifacts@1
- displayName: Publish SymbolPublishingExclusionsFile Artifact
- condition: eq(variables['SymbolExclusionFile'], 'true')
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
- PublishLocation: Container
- ArtifactName: ReleaseConfigs
-
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
- - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: PowerShell@2
- displayName: Publish Using Darc
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion 3
- -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
- -MaestroToken '$(MaestroApiAccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
-
- - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- - template: /eng/common/templates-official/steps/publish-logs.yml
- parameters:
- JobLabel: 'Publish_Artifacts_Logs'
diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml
deleted file mode 100644
index 50f04e642a3543..00000000000000
--- a/eng/common/templates-official/job/source-build.yml
+++ /dev/null
@@ -1,67 +0,0 @@
-parameters:
- # This template adds arcade-powered source-build to CI. The template produces a server job with a
- # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
-
- # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
- jobNamePrefix: 'Source_Build'
-
- # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
- # managed-only repositories. This is an object with these properties:
- #
- # name: ''
- # The name of the job. This is included in the job ID.
- # targetRID: ''
- # The name of the target RID to use, instead of the one auto-detected by Arcade.
- # nonPortable: false
- # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
- # linux-x64), and compiling against distro-provided packages rather than portable ones.
- # skipPublishValidation: false
- # Disables publishing validation. By default, a check is performed to ensure no packages are
- # published by source-build.
- # container: ''
- # A container to use. Runs in docker.
- # pool: {}
- # A pool to use. Runs directly on an agent.
- # buildScript: ''
- # Specifies the build script to invoke to perform the build in the repo. The default
- # './build.sh' should work for typical Arcade repositories, but this is customizable for
- # difficult situations.
- # jobProperties: {}
- # A list of job properties to inject at the top level, for potential extensibility beyond
- # container and pool.
- platform: {}
-
-jobs:
-- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
- displayName: Source-Build (${{ parameters.platform.name }})
-
- ${{ each property in parameters.platform.jobProperties }}:
- ${{ property.key }}: ${{ property.value }}
-
- ${{ if ne(parameters.platform.container, '') }}:
- container: ${{ parameters.platform.container }}
-
- ${{ if eq(parameters.platform.pool, '') }}:
- # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
- # source-build builds run in Docker, including the default managed platform.
- # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals build.ubuntu.1804.amd64
-
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- image: 1es-mariner-2-pt
- os: linux
-
- ${{ if ne(parameters.platform.pool, '') }}:
- pool: ${{ parameters.platform.pool }}
-
- workspace:
- clean: all
-
- steps:
- - template: /eng/common/templates-official/steps/source-build.yml
- parameters:
- platform: ${{ parameters.platform }}
diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml
deleted file mode 100644
index 53a9ef51fd82d2..00000000000000
--- a/eng/common/templates-official/job/source-index-stage1.yml
+++ /dev/null
@@ -1,67 +0,0 @@
-parameters:
- runAsPublic: false
- sourceIndexPackageVersion: 1.0.1-20240129.2
- sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
- sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
- preSteps: []
- binlogPath: artifacts/log/Debug/Build.binlog
- condition: ''
- dependsOn: ''
- pool: ''
-
-jobs:
-- job: SourceIndexStage1
- dependsOn: ${{ parameters.dependsOn }}
- condition: ${{ parameters.condition }}
- variables:
- - name: SourceIndexPackageVersion
- value: ${{ parameters.sourceIndexPackageVersion }}
- - name: SourceIndexPackageSource
- value: ${{ parameters.sourceIndexPackageSource }}
- - name: BinlogPath
- value: ${{ parameters.binlogPath }}
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: source-dot-net stage1 variables
- - template: /eng/common/templates-official/variables/pool-providers.yml
-
- ${{ if ne(parameters.pool, '') }}:
- pool: ${{ parameters.pool }}
- ${{ if eq(parameters.pool, '') }}:
- pool:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- name: $(DncEngPublicBuildPool)
- image: windows.vs2022.amd64.open
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- name: $(DncEngInternalBuildPool)
- image: windows.vs2022.amd64
-
- steps:
- - ${{ each preStep in parameters.preSteps }}:
- - ${{ preStep }}
-
- - task: UseDotNet@2
- displayName: Use .NET 8 SDK
- inputs:
- packageType: sdk
- version: 8.0.x
- installationPath: $(Agent.TempDirectory)/dotnet
- workingDirectory: $(Agent.TempDirectory)
-
- - script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- displayName: Download Tools
- # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
- workingDirectory: $(Agent.TempDirectory)
-
- - script: ${{ parameters.sourceIndexBuildCommand }}
- displayName: Build Repository
-
- - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- displayName: Process Binlog into indexable sln
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
- displayName: Upload stage1 artifacts to source index
- env:
- BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml
deleted file mode 100644
index b68d3c2f31990f..00000000000000
--- a/eng/common/templates-official/jobs/codeql-build.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
- # Optional: if specified, restore and use this version of Guardian instead of the default.
- overrideGuardianVersion: ''
-
-jobs:
-- template: /eng/common/templates-official/jobs/jobs.yml
- parameters:
- enableMicrobuild: false
- enablePublishBuildArtifacts: false
- enablePublishTestResults: false
- enablePublishBuildAssets: false
- enablePublishUsingPipelines: false
- enableTelemetry: true
-
- variables:
- - group: Publish-Build-Assets
- # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
- # sync with the packages.config file.
- - name: DefaultGuardianVersion
- value: 0.109.0
- - name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
- - name: GuardianVersion
- value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
-
- jobs: ${{ parameters.jobs }}
-
diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml
deleted file mode 100644
index 857a0f8ba43e84..00000000000000
--- a/eng/common/templates-official/jobs/jobs.yml
+++ /dev/null
@@ -1,97 +0,0 @@
-parameters:
- # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
- continueOnError: false
-
- # Optional: Include PublishBuildArtifacts task
- enablePublishBuildArtifacts: false
-
- # Optional: Enable publishing using release pipelines
- enablePublishUsingPipelines: false
-
- # Optional: Enable running the source-build jobs to build repo from source
- enableSourceBuild: false
-
- # Optional: Parameters for source-build template.
- # See /eng/common/templates-official/jobs/source-build.yml for options
- sourceBuildParameters: []
-
- graphFileGeneration:
- # Optional: Enable generating the graph files at the end of the build
- enabled: false
- # Optional: Include toolset dependencies in the generated graph files
- includeToolset: false
-
- # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
- jobs: []
-
- # Optional: Override automatically derived dependsOn value for "publish build assets" job
- publishBuildAssetsDependsOn: ''
-
- # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage.
- publishAssetsImmediately: false
-
- # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
- artifactsPublishingAdditionalParameters: ''
- signingValidationAdditionalParameters: ''
-
- # Optional: should run as a public build even in the internal project
- # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
- runAsPublic: false
-
- enableSourceIndex: false
- sourceIndexParams: {}
-
-# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
-# and some (Microbuild) should only be applied to non-PR cases for internal builds.
-
-jobs:
-- ${{ each job in parameters.jobs }}:
- - template: ../job/job.yml
- parameters:
- # pass along parameters
- ${{ each parameter in parameters }}:
- ${{ if ne(parameter.key, 'jobs') }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
- # pass along job properties
- ${{ each property in job }}:
- ${{ if ne(property.key, 'job') }}:
- ${{ property.key }}: ${{ property.value }}
-
- name: ${{ job.job }}
-
-- ${{ if eq(parameters.enableSourceBuild, true) }}:
- - template: /eng/common/templates-official/jobs/source-build.yml
- parameters:
- allCompletedJobId: Source_Build_Complete
- ${{ each parameter in parameters.sourceBuildParameters }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
- - template: ../job/source-index-stage1.yml
- parameters:
- runAsPublic: ${{ parameters.runAsPublic }}
- ${{ each parameter in parameters.sourceIndexParams }}:
- ${{ parameter.key }}: ${{ parameter.value }}
-
-- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
- - template: ../job/publish-build-assets.yml
- parameters:
- continueOnError: ${{ parameters.continueOnError }}
- dependsOn:
- - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- - ${{ each job in parameters.jobs }}:
- - ${{ job.job }}
- - ${{ if eq(parameters.enableSourceBuild, true) }}:
- - Source_Build_Complete
-
- runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
- enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
- artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
- signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml
deleted file mode 100644
index 08e5db9bb11616..00000000000000
--- a/eng/common/templates-official/jobs/source-build.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-parameters:
- # This template adds arcade-powered source-build to CI. A job is created for each platform, as
- # well as an optional server job that completes when all platform jobs complete.
-
- # The name of the "join" job for all source-build platforms. If set to empty string, the job is
- # not included. Existing repo pipelines can use this job depend on all source-build jobs
- # completing without maintaining a separate list of every single job ID: just depend on this one
- # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
- allCompletedJobId: ''
-
- # See /eng/common/templates-official/job/source-build.yml
- jobNamePrefix: 'Source_Build'
-
- # This is the default platform provided by Arcade, intended for use by a managed-only repo.
- defaultManagedPlatform:
- name: 'Managed'
- container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
-
- # Defines the platforms on which to run build jobs. One job is created for each platform, and the
- # object in this array is sent to the job template as 'platform'. If no platforms are specified,
- # one job runs on 'defaultManagedPlatform'.
- platforms: []
-
-jobs:
-
-- ${{ if ne(parameters.allCompletedJobId, '') }}:
- - job: ${{ parameters.allCompletedJobId }}
- displayName: Source-Build Complete
- pool: server
- dependsOn:
- - ${{ each platform in parameters.platforms }}:
- - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- - ${{ if eq(length(parameters.platforms), 0) }}:
- - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
-
-- ${{ each platform in parameters.platforms }}:
- - template: /eng/common/templates-official/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ platform }}
-
-- ${{ if eq(length(parameters.platforms), 0) }}:
- - template: /eng/common/templates-official/job/source-build.yml
- parameters:
- jobNamePrefix: ${{ parameters.jobNamePrefix }}
- platform: ${{ parameters.defaultManagedPlatform }}
diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml
deleted file mode 100644
index b9ede10bf099ae..00000000000000
--- a/eng/common/templates-official/post-build/common-variables.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-variables:
- - group: Publish-Build-Assets
-
- # Whether the build is internal or not
- - name: IsInternalBuild
- value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
-
- # Default Maestro++ API Endpoint and API Version
- - name: MaestroApiEndPoint
- value: "https://maestro.dot.net"
- - name: MaestroApiAccessToken
- value: $(MaestroAccessToken)
- - name: MaestroApiVersion
- value: "2020-02-20"
-
- - name: SourceLinkCLIVersion
- value: 3.0.0
- - name: SymbolToolVersion
- value: 1.0.1
- - name: BinlogToolVersion
- value: 1.0.11
-
- - name: runCodesignValidationInjection
- value: false
diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml
deleted file mode 100644
index 5c98fe1c0f3a96..00000000000000
--- a/eng/common/templates-official/post-build/post-build.yml
+++ /dev/null
@@ -1,285 +0,0 @@
-parameters:
- # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
- # Publishing V1 is no longer supported
- # Publishing V2 is no longer supported
- # Publishing V3 is the default
- - name: publishingInfraVersion
- displayName: Which version of publishing should be used to promote the build definition?
- type: number
- default: 3
- values:
- - 3
-
- - name: BARBuildId
- displayName: BAR Build Id
- type: number
- default: 0
-
- - name: PromoteToChannelIds
- displayName: Channel to promote BARBuildId to
- type: string
- default: ''
-
- - name: enableSourceLinkValidation
- displayName: Enable SourceLink validation
- type: boolean
- default: false
-
- - name: enableSigningValidation
- displayName: Enable signing validation
- type: boolean
- default: true
-
- - name: enableSymbolValidation
- displayName: Enable symbol validation
- type: boolean
- default: false
-
- - name: enableNugetValidation
- displayName: Enable NuGet validation
- type: boolean
- default: true
-
- - name: publishInstallersAndChecksums
- displayName: Publish installers and checksums
- type: boolean
- default: true
-
- - name: SDLValidationParameters
- type: object
- default:
- enable: false
- publishGdn: false
- continueOnError: false
- params: ''
- artifactNames: ''
- downloadArtifacts: true
-
- # These parameters let the user customize the call to sdk-task.ps1 for publishing
- # symbols & general artifacts as well as for signing validation
- - name: symbolPublishingAdditionalParameters
- displayName: Symbol publishing additional parameters
- type: string
- default: ''
-
- - name: artifactsPublishingAdditionalParameters
- displayName: Artifact publishing additional parameters
- type: string
- default: ''
-
- - name: signingValidationAdditionalParameters
- displayName: Signing validation additional parameters
- type: string
- default: ''
-
- # Which stages should finish execution before post-build stages start
- - name: validateDependsOn
- type: object
- default:
- - build
-
- - name: publishDependsOn
- type: object
- default:
- - Validate
-
- # Optional: Call asset publishing rather than running in a separate stage
- - name: publishAssetsImmediately
- type: boolean
- default: false
-
-stages:
-- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- - stage: Validate
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Validate Build Assets
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates-official/variables/pool-providers.yml
- jobs:
- - job:
- displayName: NuGet Validation
- condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022-pt
- os: windows
-
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
- arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
- -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
-
- - job:
- displayName: Signing Validation
- condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022-pt
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Package Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: PackageArtifacts
- checkDownloadedFiles: true
- itemPattern: |
- **
- !**/Microsoft.SourceBuild.Intermediate.*.nupkg
-
- # This is necessary whenever we want to publish/restore to an AzDO private feed
- # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
- # otherwise it'll complain about accessing a private feed.
- - task: NuGetAuthenticate@1
- displayName: 'Authenticate to AzDO Feeds'
-
- # Signing validation will optionally work with the buildmanifest file which is downloaded from
- # Azure DevOps above.
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: eng\common\sdk-task.ps1
- arguments: -task SigningValidation -restore -msbuildEngine vs
- /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
- /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
- ${{ parameters.signingValidationAdditionalParameters }}
-
- - template: ../steps/publish-logs.yml
- parameters:
- StageLabel: 'Validation'
- JobLabel: 'Signing'
- BinlogToolVersion: $(BinlogToolVersion)
-
- - job:
- displayName: SourceLink Validation
- condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022-pt
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: DownloadBuildArtifacts@0
- displayName: Download Blob Artifacts
- inputs:
- buildType: specific
- buildVersionToDownload: specific
- project: $(AzDOProjectName)
- pipeline: $(AzDOPipelineId)
- buildId: $(AzDOBuildId)
- artifactName: BlobArtifacts
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- displayName: Validate
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
- arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
- -ExtractPath $(Agent.BuildDirectory)/Extract/
- -GHRepoName $(Build.Repository.Name)
- -GHCommit $(Build.SourceVersion)
- -SourcelinkCliVersion $(SourceLinkCLIVersion)
- continueOnError: true
-
-- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
- - stage: publish_using_darc
- ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
- dependsOn: ${{ parameters.publishDependsOn }}
- ${{ else }}:
- dependsOn: ${{ parameters.validateDependsOn }}
- displayName: Publish using Darc
- variables:
- - template: common-variables.yml
- - template: /eng/common/templates-official/variables/pool-providers.yml
- jobs:
- - job:
- displayName: Publish Using Darc
- timeoutInMinutes: 120
- pool:
- # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- name: AzurePipelines-EO
- image: 1ESPT-Windows2022
- demands: Cmd
- os: windows
- # If it's not devdiv, it's dnceng
- ${{ else }}:
- name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022-pt
- os: windows
- steps:
- - template: setup-maestro-vars.yml
- parameters:
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
-
- - task: NuGetAuthenticate@1
-
- - task: PowerShell@2
- displayName: Publish Using Darc
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
- arguments: -BuildId $(BARBuildId)
- -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
- -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
- -MaestroToken '$(MaestroApiAccessToken)'
- -WaitPublishingFinish true
- -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
- -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml
deleted file mode 100644
index 0c87f149a4ad77..00000000000000
--- a/eng/common/templates-official/post-build/setup-maestro-vars.yml
+++ /dev/null
@@ -1,70 +0,0 @@
-parameters:
- BARBuildId: ''
- PromoteToChannelIds: ''
-
-steps:
- - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
- - task: DownloadBuildArtifacts@0
- displayName: Download Release Configs
- inputs:
- buildType: current
- artifactName: ReleaseConfigs
- checkDownloadedFiles: true
-
- - task: PowerShell@2
- name: setReleaseVars
- displayName: Set Release Configs Vars
- inputs:
- targetType: inline
- pwsh: true
- script: |
- try {
- if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
- $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
-
- $BarId = $Content | Select -Index 0
- $Channels = $Content | Select -Index 1
- $IsStableBuild = $Content | Select -Index 2
-
- $AzureDevOpsProject = $Env:System_TeamProject
- $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
- $AzureDevOpsBuildId = $Env:Build_BuildId
- }
- else {
- $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
-
- $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
- $apiHeaders.Add('Accept', 'application/json')
- $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
-
- $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
-
- $BarId = $Env:BARBuildId
- $Channels = $Env:PromoteToMaestroChannels -split ","
- $Channels = $Channels -join "]["
- $Channels = "[$Channels]"
-
- $IsStableBuild = $buildInfo.stable
- $AzureDevOpsProject = $buildInfo.azureDevOpsProject
- $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
- $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
- }
-
- Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
- Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
- Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
-
- Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
- Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
- Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
- }
- catch {
- Write-Host $_
- Write-Host $_.Exception
- Write-Host $_.ScriptStackTrace
- exit 1
- }
- env:
- MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
- BARBuildId: ${{ parameters.BARBuildId }}
- PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
diff --git a/eng/common/templates-official/post-build/trigger-subscription.yml b/eng/common/templates-official/post-build/trigger-subscription.yml
deleted file mode 100644
index da669030daf6e9..00000000000000
--- a/eng/common/templates-official/post-build/trigger-subscription.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- ChannelId: 0
-
-steps:
-- task: PowerShell@2
- displayName: Triggering subscriptions
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
- arguments: -SourceRepo $(Build.Repository.Uri)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates-official/steps/add-build-to-channel.yml b/eng/common/templates-official/steps/add-build-to-channel.yml
deleted file mode 100644
index f67a210d62f3e5..00000000000000
--- a/eng/common/templates-official/steps/add-build-to-channel.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- ChannelId: 0
-
-steps:
-- task: PowerShell@2
- displayName: Add Build to Channel
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
- arguments: -BuildId $(BARBuildId)
- -ChannelId ${{ parameters.ChannelId }}
- -MaestroApiAccessToken $(MaestroApiAccessToken)
- -MaestroApiEndPoint $(MaestroApiEndPoint)
- -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml
deleted file mode 100644
index 0ecec47b0c9177..00000000000000
--- a/eng/common/templates-official/steps/component-governance.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-parameters:
- disableComponentGovernance: false
- componentGovernanceIgnoreDirectories: ''
-
-steps:
-- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- - script: "echo ##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
- displayName: Set skipComponentGovernanceDetection variable
-- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- - task: ComponentGovernanceComponentDetection@0
- continueOnError: true
- inputs:
- ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml
deleted file mode 100644
index 488b560e8ba4eb..00000000000000
--- a/eng/common/templates-official/steps/generate-sbom.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
-# PackageName - The name of the package this SBOM represents.
-# PackageVersion - The version of the package this SBOM represents.
-# ManifestDirPath - The path of the directory where the generated manifest files will be placed
-# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
-
-parameters:
- PackageVersion: 7.0.0
- BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
- PackageName: '.NET'
- ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
- IgnoreDirectories: ''
- sbomContinueOnError: true
-
-steps:
-- task: PowerShell@2
- displayName: Prep for SBOM generation in (Non-linux)
- condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
- inputs:
- filePath: ./eng/common/generate-sbom-prep.ps1
- arguments: ${{parameters.manifestDirPath}}
-
-# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
-- script: |
- chmod +x ./eng/common/generate-sbom-prep.sh
- ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
- displayName: Prep for SBOM generation in (Linux)
- condition: eq(variables['Agent.Os'], 'Linux')
- continueOnError: ${{ parameters.sbomContinueOnError }}
-
-- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
- displayName: 'Generate SBOM manifest'
- continueOnError: ${{ parameters.sbomContinueOnError }}
- inputs:
- PackageName: ${{ parameters.packageName }}
- BuildDropPath: ${{ parameters.buildDropPath }}
- PackageVersion: ${{ parameters.packageVersion }}
- ManifestDirPath: ${{ parameters.manifestDirPath }}
- ${{ if ne(parameters.IgnoreDirectories, '') }}:
- AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
-
-- task: 1ES.PublishPipelineArtifact@1
- displayName: Publish SBOM manifest
- continueOnError: ${{parameters.sbomContinueOnError}}
- inputs:
- targetPath: '${{parameters.manifestDirPath}}'
- artifactName: $(ARTIFACT_NAME)
-
diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml
deleted file mode 100644
index 84b2f559c56e40..00000000000000
--- a/eng/common/templates-official/steps/publish-logs.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-parameters:
- StageLabel: ''
- JobLabel: ''
- CustomSensitiveDataList: ''
- # A default - in case value from eng/common/templates-official/post-build/common-variables.yml is not passed
- BinlogToolVersion: '1.0.11'
-
-steps:
-- task: Powershell@2
- displayName: Prepare Binlogs to Upload
- inputs:
- targetType: inline
- script: |
- New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
- continueOnError: true
- condition: always()
-
-- task: PowerShell@2
- displayName: Redact Logs
- inputs:
- filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1
- # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml
- # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
- # If the file exists - sensitive data for redaction will be sourced from it
- # (single entry per line, lines starting with '# ' are considered comments and skipped)
- arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs'
- -BinlogToolVersion ${{parameters.BinlogToolVersion}}
- -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt'
- '$(publishing-dnceng-devdiv-code-r-build-re)'
- '$(MaestroAccessToken)'
- '$(dn-bot-all-orgs-artifact-feeds-rw)'
- '$(akams-client-id)'
- '$(akams-client-secret)'
- '$(microsoft-symbol-server-pat)'
- '$(symweb-symbol-server-pat)'
- '$(dn-bot-all-orgs-build-rw-code-rw)'
- ${{parameters.CustomSensitiveDataList}}
- continueOnError: true
- condition: always()
-
-- task: 1ES.PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
- PublishLocation: Container
- ArtifactName: PostBuildLogs
- continueOnError: true
- condition: always()
diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml
deleted file mode 100644
index 83d97a26a01ff9..00000000000000
--- a/eng/common/templates-official/steps/retain-build.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-parameters:
- # Optional azure devops PAT with build execute permissions for the build's organization,
- # only needed if the build that should be retained ran on a different organization than
- # the pipeline where this template is executing from
- Token: ''
- # Optional BuildId to retain, defaults to the current running build
- BuildId: ''
- # Azure devops Organization URI for the build in the https://dev.azure.com/ format.
- # Defaults to the organization the current pipeline is running on
- AzdoOrgUri: '$(System.CollectionUri)'
- # Azure devops project for the build. Defaults to the project the current pipeline is running on
- AzdoProject: '$(System.TeamProject)'
-
-steps:
- - task: powershell@2
- inputs:
- targetType: 'filePath'
- filePath: eng/common/retain-build.ps1
- pwsh: true
- arguments: >
- -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
- -AzdoProject ${{parameters.AzdoProject}}
- -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
- -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
- displayName: Enable permanent build retention
- env:
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- BUILD_ID: $(Build.BuildId)
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml
deleted file mode 100644
index 68fa739c4ab215..00000000000000
--- a/eng/common/templates-official/steps/send-to-helix.yml
+++ /dev/null
@@ -1,93 +0,0 @@
-# Please remember to update the documentation if you make changes to these parameters!
-parameters:
- HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
- HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
- HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
- HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
- HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
- HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY
- HelixProjectArguments: '' # optional -- arguments passed to the build command
- HelixConfiguration: '' # optional -- additional property attached to a job
- HelixPreCommands: '' # optional -- commands to run before Helix work item execution
- HelixPostCommands: '' # optional -- commands to run after Helix work item execution
- WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
- WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
- WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
- CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
- XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
- XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
- XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
- XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
- XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
- IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
- DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
- WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
- IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
- HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
- Creator: '' # optional -- if the build is external, use this to specify who is sending the job
- DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
- condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
- continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
-
-steps:
- - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
- displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
- - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
- displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
- env:
- BuildConfig: $(_BuildConfig)
- HelixSource: ${{ parameters.HelixSource }}
- HelixType: ${{ parameters.HelixType }}
- HelixBuild: ${{ parameters.HelixBuild }}
- HelixConfiguration: ${{ parameters.HelixConfiguration }}
- HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
- HelixAccessToken: ${{ parameters.HelixAccessToken }}
- HelixPreCommands: ${{ parameters.HelixPreCommands }}
- HelixPostCommands: ${{ parameters.HelixPostCommands }}
- WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
- WorkItemCommand: ${{ parameters.WorkItemCommand }}
- WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
- CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
- XUnitProjects: ${{ parameters.XUnitProjects }}
- XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
- XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
- XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
- XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
- IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
- DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
- DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
- WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
- HelixBaseUri: ${{ parameters.HelixBaseUri }}
- Creator: ${{ parameters.Creator }}
- SYSTEM_ACCESSTOKEN: $(System.AccessToken)
- condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
- continueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml
deleted file mode 100644
index 53ed57b6d48abc..00000000000000
--- a/eng/common/templates-official/steps/source-build.yml
+++ /dev/null
@@ -1,131 +0,0 @@
-parameters:
- # This template adds arcade-powered source-build to CI.
-
- # This is a 'steps' template, and is intended for advanced scenarios where the existing build
- # infra has a careful build methodology that must be followed. For example, a repo
- # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
- # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
- # GitHub. Using this steps template leaves room for that infra to be included.
-
- # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml'
- # for details. The entire object is described in the 'job' template for simplicity, even though
- # the usage of the properties on this object is split between the 'job' and 'steps' templates.
- platform: {}
-
-steps:
-# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
-- script: |
- set -x
- df -h
-
- # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
- # In that case, call the feed setup script to add internal feeds corresponding to public ones.
- # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
- # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
- # changes.
- internalRestoreArgs=
- if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
- # Temporarily work around https://github.com/dotnet/arcade/issues/7709
- chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
- $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
- internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
-
- # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
- # This only works if there is a username/email configured, which won't be the case in most CI runs.
- git config --get user.email
- if [ $? -ne 0 ]; then
- git config user.email dn-bot@microsoft.com
- git config user.name dn-bot
- fi
- fi
-
- # If building on the internal project, the internal storage variable may be available (usually only if needed)
- # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
- # in the default public locations.
- internalRuntimeDownloadArgs=
- if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
- internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
- fi
-
- buildConfig=Release
- # Check if AzDO substitutes in a build config from a variable, and use it if so.
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
- buildConfig='$(_BuildConfig)'
- fi
-
- officialBuildArgs=
- if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
- officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
- fi
-
- targetRidArgs=
- if [ '${{ parameters.platform.targetRID }}' != '' ]; then
- targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
- fi
-
- runtimeOsArgs=
- if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
- runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
- fi
-
- baseOsArgs=
- if [ '${{ parameters.platform.baseOS }}' != '' ]; then
- baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
- fi
-
- publishArgs=
- if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
- publishArgs='--publish'
- fi
-
- assetManifestFileName=SourceBuild_RidSpecific.xml
- if [ '${{ parameters.platform.name }}' != '' ]; then
- assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
- fi
-
- ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
- --configuration $buildConfig \
- --restore --build --pack $publishArgs -bl \
- $officialBuildArgs \
- $internalRuntimeDownloadArgs \
- $internalRestoreArgs \
- $targetRidArgs \
- $runtimeOsArgs \
- $baseOsArgs \
- /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
- /p:ArcadeBuildFromSource=true \
- /p:DotNetBuildSourceOnly=true \
- /p:DotNetBuildRepo=true \
- /p:AssetManifestFileName=$assetManifestFileName
- displayName: Build
-
-# Upload build logs for diagnosis.
-- task: CopyFiles@2
- displayName: Prepare BuildLogs staging directory
- inputs:
- SourceFolder: '$(Build.SourcesDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- artifacts/sb/prebuilt-report/**
- TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
- CleanTargetFolder: true
- continueOnError: true
- condition: succeededOrFailed()
-
-- task: 1ES.PublishPipelineArtifact@1
- displayName: Publish BuildLogs
- inputs:
- targetPath: '$(Build.StagingDirectory)/BuildLogs'
- artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
- continueOnError: true
- condition: succeededOrFailed()
-
-# Manually inject component detection so that we can ignore the source build upstream cache, which contains
-# a nupkg cache of input packages (a local feed).
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
-# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
-- task: ComponentGovernanceComponentDetection@0
- displayName: Component Detection (Exclude upstream cache)
- inputs:
- ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
diff --git a/eng/common/templates-official/variables/pool-providers.yml b/eng/common/templates-official/variables/pool-providers.yml
deleted file mode 100644
index beab7d1bfba062..00000000000000
--- a/eng/common/templates-official/variables/pool-providers.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool,
-# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches.
-
-# Motivation:
-# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS
-# (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing
-# (allowing release builds and main PR builds to not intefere with each other) and billing (required for COGS.
-# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
-# team needs to move resources around and create new and potentially differently-named pools. Using this template
-# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming.
-
-# How to use:
-# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do).
-# If we find alternate naming conventions in broad usage it can be added to the condition below.
-#
-# First, import the template in an arcade-ified repo to pick up the variables, e.g.:
-#
-# variables:
-# - template: /eng/common/templates-official/variables/pool-providers.yml
-#
-# ... then anywhere specifying the pool provider use the runtime variables,
-# $(DncEngInternalBuildPool)
-#
-# pool:
-# name: $(DncEngInternalBuildPool)
-# image: 1es-windows-2022-pt
-
-variables:
- # Coalesce the target and source branches so we know when a PR targets a release branch
- # If these variables are somehow missing, fall back to main (tends to have more capacity)
-
- # Any new -Svc alternative pools should have variables added here to allow for splitting work
-
- - name: DncEngInternalBuildPool
- value: $[
- replace(
- replace(
- eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
- True,
- 'NetCore1ESPool-Svc-Internal'
- ),
- False,
- 'NetCore1ESPool-Internal'
- )
- ]
\ No newline at end of file
diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml
deleted file mode 100644
index dbdd66d4a4b3a0..00000000000000
--- a/eng/common/templates-official/variables/sdl-variables.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-variables:
-# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
-# sync with the packages.config file.
-- name: DefaultGuardianVersion
- value: 0.109.0
-- name: GuardianPackagesConfigFile
- value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index a3277bf15c51ff..01c0dd995e4b61 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -15,7 +15,6 @@ parameters:
timeoutInMinutes: ''
variables: []
workspace: ''
- templateContext: ''
# Job base template specific parameters
# See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
@@ -69,9 +68,6 @@ jobs:
${{ if ne(parameters.timeoutInMinutes, '') }}:
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
- ${{ if ne(parameters.templateContext, '') }}:
- templateContext: ${{ parameters.templateContext }}
-
variables:
- ${{ if ne(parameters.enableTelemetry, 'false') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index a58aef2847e1c8..7d8dc89b919bc8 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -65,11 +65,6 @@ $ErrorActionPreference = 'Stop'
# Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed
[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null }
-# True if the build is a product build
-[bool]$productBuild = if (Test-Path variable:productBuild) { $productBuild } else { $false }
-
-[String[]]$properties = if (Test-Path variable:properties) { $properties } else { @() }
-
function Create-Directory ([string[]] $path) {
New-Item -Path $path -Force -ItemType 'Directory' | Out-Null
}
@@ -855,8 +850,7 @@ function MSBuild-Core() {
}
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
- # Skip this when the build is a child of the VMR orchestrator build.
- if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$productBuild -and -not($properties -like "*DotNetBuildRepo=true*")) {
+ if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null) {
Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index db64e298ff6314..ece4b730795360 100755
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -68,9 +68,6 @@ fi
runtime_source_feed=${runtime_source_feed:-''}
runtime_source_feed_key=${runtime_source_feed_key:-''}
-# True if the build is a product build
-product_build=${product_build:-false}
-
# Resolve any symlinks in the given path.
function ResolvePath {
local path=$1
@@ -144,7 +141,7 @@ function InitializeDotNetCli {
if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
dotnet_root="$DOTNET_INSTALL_DIR"
else
- dotnet_root="${repo_root}.dotnet"
+ dotnet_root="$repo_root/.dotnet"
export DOTNET_INSTALL_DIR="$dotnet_root"
@@ -506,8 +503,7 @@ function MSBuild-Core {
echo "Build failed with exit code $exit_code. Check errors above."
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
- # Skip this when the build is a child of the VMR orchestrator build.
- if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true && "$properties" != *"DotNetBuildRepo=true"* ]]; then
+ if [[ "$ci" == "true" && -n ${SYSTEM_TEAMPROJECT:-} ]]; then
Write-PipelineSetResult -result "Failed" -message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
diff --git a/eng/native/configurecompiler.cmake b/eng/native/configurecompiler.cmake
index 0ab4a178b055fa..166fd52ab96562 100644
--- a/eng/native/configurecompiler.cmake
+++ b/eng/native/configurecompiler.cmake
@@ -832,32 +832,10 @@ if (MSVC)
add_compile_options($<$:/we4013>) # 'function' undefined - assuming extern returning int.
add_compile_options($<$:/we4102>) # "'%$S' : unreferenced label".
add_compile_options($<$:/we4551>) # Function call missing argument list.
+ add_compile_options($<$:/we4700>) # Local used w/o being initialized.
add_compile_options($<$:/we4640>) # 'instance' : construction of local static object is not thread-safe
add_compile_options($<$:/we4806>) # Unsafe operation involving type 'bool'.
- # SDL requires the below warnings to be treated as errors:
- # More info: https://liquid.microsoft.com/Web/Object/Read/ms.security/Requirements/Microsoft.Security.SystemsADM.10086
- # (Access to that URL restricted to Microsoft employees.)
- add_compile_options($<$:/we4055>) # 'conversion' : from data pointer 'type1' to function pointer 'type2'
- add_compile_options($<$:/we4146>) # unary minus operator applied to unsigned type, result still unsigned
- add_compile_options($<$:/we4242>) # 'identifier' : conversion from 'type1' to 'type2', possible loss of data
- add_compile_options($<$:/we4244>) # 'conversion' conversion from 'type1' to 'type2', possible loss of data
- add_compile_options($<$:/we4267>) # 'var' : conversion from 'size_t' to 'type', possible loss of data
- add_compile_options($<$:/we4302>) # 'conversion' : truncation from 'type 1' to 'type 2'
- add_compile_options($<$:/we4308>) # negative integral constant converted to unsigned type
- add_compile_options($<$:/we4509>) # nonstandard extension used: 'function' uses SEH and 'object' has destructor
- add_compile_options($<$:/we4510>) # 'class' : default constructor could not be generated
- add_compile_options($<$:/we4532>) # 'continue' : jump out of __finally/finally block has undefined behavior during termination handling
- add_compile_options($<$:/we4533>) # initialization of 'variable' is skipped by 'instruction'
- add_compile_options($<$:/we4610>) # object 'class' can never be instantiated - user-defined constructor required
- add_compile_options($<$:/we4611>) # interaction between 'function' and C++ object destruction is non-portable
- add_compile_options($<$:/we4700>) # uninitialized local variable 'name' used
- add_compile_options($<$:/we4701>) # Potentially uninitialized local variable 'name' used
- add_compile_options($<$:/we4703>) # Potentially uninitialized local pointer variable 'name' used
- add_compile_options($<$:/we4789>) # destination of memory copy is too small
- add_compile_options($<$:/we4995>) # 'function': name was marked as #pragma deprecated
- add_compile_options($<$:/we4996>) # 'function': was declared deprecated
-
# Set Warning Level 3:
add_compile_options($<$:/w34092>) # Sizeof returns 'unsigned long'.
add_compile_options($<$:/w34121>) # Structure is sensitive to alignment.
diff --git a/eng/native/configureplatform.cmake b/eng/native/configureplatform.cmake
index c7c378ab0e41b3..f5b5753e129eb5 100644
--- a/eng/native/configureplatform.cmake
+++ b/eng/native/configureplatform.cmake
@@ -27,8 +27,6 @@ if(CLR_CMAKE_HOST_OS STREQUAL linux)
endif()
elseif(CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL i686)
set(CLR_CMAKE_HOST_UNIX_X86 1)
- elseif(CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL aarch64)
- set(CLR_CMAKE_HOST_UNIX_ARM64 1)
else()
clr_unknown_arch()
endif()
diff --git a/eng/native/functions.cmake b/eng/native/functions.cmake
index e10e008d775e44..543722a9c0a59e 100644
--- a/eng/native/functions.cmake
+++ b/eng/native/functions.cmake
@@ -1,8 +1,8 @@
function(clr_unknown_arch)
if (WIN32)
- message(FATAL_ERROR "Only AMD64, ARM64, ARM and I386 hosts are supported. Found: ${CMAKE_SYSTEM_PROCESSOR}")
+ message(FATAL_ERROR "Only AMD64, ARM64, ARM and I386 are supported. Found: ${CMAKE_SYSTEM_PROCESSOR}")
elseif(CLR_CROSS_COMPONENTS_BUILD)
- message(FATAL_ERROR "Only AMD64, ARM64 and I386 hosts are supported for linux cross-architecture component. Found: ${CMAKE_SYSTEM_PROCESSOR}")
+ message(FATAL_ERROR "Only AMD64, I386 host are supported for linux cross-architecture component. Found: ${CMAKE_SYSTEM_PROCESSOR}")
else()
message(FATAL_ERROR "'${CMAKE_SYSTEM_PROCESSOR}' is an unsupported architecture.")
endif()
@@ -228,12 +228,9 @@ function(preprocess_file inputFilename outputFilename)
COMMENT "Preprocessing ${inputFilename}. Outputting to ${outputFilename}"
)
else()
- if (CMAKE_CXX_COMPILER_TARGET AND CMAKE_CXX_COMPILER_ID MATCHES "Clang")
- set(_LOCAL_CROSS_TARGET "--target=${CMAKE_CXX_COMPILER_TARGET}")
- endif()
add_custom_command(
OUTPUT ${outputFilename}
- COMMAND ${CMAKE_CXX_COMPILER} ${_LOCAL_CROSS_TARGET} -E -P ${PREPROCESS_DEFINITIONS} ${PREPROCESS_INCLUDE_DIRECTORIES} -o ${outputFilename} -x c ${inputFilename}
+ COMMAND ${CMAKE_CXX_COMPILER} -E -P ${PREPROCESS_DEFINITIONS} ${PREPROCESS_INCLUDE_DIRECTORIES} -o ${outputFilename} -x c ${inputFilename}
DEPENDS ${inputFilename}
COMMENT "Preprocessing ${inputFilename}. Outputting to ${outputFilename}"
)
diff --git a/eng/pipelines/common/evaluate-default-paths.yml b/eng/pipelines/common/evaluate-default-paths.yml
index edbc1c618f6066..a5b40862c30c6a 100644
--- a/eng/pipelines/common/evaluate-default-paths.yml
+++ b/eng/pipelines/common/evaluate-default-paths.yml
@@ -28,7 +28,7 @@ parameters:
src/mono/nuget/Microsoft.NET.Runtime.wasm.Sample.Mono/*
src/mono/nuget/Microsoft.NET.Sdk.WebAssembly.Pack/*
src/mono/nuget/Microsoft.NETCore.BrowserDebugHost.Transport/*
- src/mono/nuget/Microsoft.NET.Runtime.WorkloadTesting.Internal/**/*
+ src/mono/nuget/Microsoft.NET.Runtime.WorkloadTesting.Internal/*
src/mono/nuget/Microsoft.NET.Workload*
src/mono/sample/wasm/*
src/mono/browser/*
@@ -213,7 +213,7 @@ jobs:
- eng/testing/scenarios/BuildWasmAppsJobsList.txt
- eng/testing/tests.browser.targets
- eng/testing/tests.was*.targets
- - src/mono/nuget/Microsoft.NET.Runtime.WorkloadTesting.Internal/Sdk/WorkloadTesting.Core.targets
+ - src/mono/nuget/Microsoft.NET.Runtime.WorkloadTesting.Internal/WorkloadTesting.Core.targets
- eng/testing/workloads-browser.targets
- eng/testing/workloads-testing.targets
- eng/testing/workloads-wasi.targets
@@ -303,7 +303,7 @@ jobs:
exclude:
- eng/testing/scenarios/BuildWasiAppsJobsList.txt
- eng/testing/scenarios/BuildWasmAppsJobsList.txt
- src/mono/nuget/Microsoft.NET.Runtime.WorkloadTesting.Internal/Sdk/WorkloadTesting.Core.targets
+ src/mono/nuget/Microsoft.NET.Runtime.WorkloadTesting.Internal/WorkloadTesting.Core.targets
- eng/testing/workloads-browser.targets
- eng/testing/workloads-testing.targets
- eng/testing/workloads-wasi.targets
diff --git a/eng/pipelines/common/platform-matrix.yml b/eng/pipelines/common/platform-matrix.yml
index a846faca46e3c4..776cdae314c429 100644
--- a/eng/pipelines/common/platform-matrix.yml
+++ b/eng/pipelines/common/platform-matrix.yml
@@ -158,30 +158,6 @@ jobs:
crossBuild: true
${{ insert }}: ${{ parameters.jobParameters }}
-# Linux Bionic arm
-
-- ${{ if containsValue(parameters.platforms, 'linux_bionic_arm') }}:
- - template: xplat-setup.yml
- parameters:
- jobTemplate: ${{ parameters.jobTemplate }}
- helixQueuesTemplate: ${{ parameters.helixQueuesTemplate }}
- variables: ${{ parameters.variables }}
- osGroup: linux
- osSubgroup: _bionic
- archType: arm
- targetRid: linux-bionic-arm
- platform: linux_bionic_arm
- shouldContinueOnError: ${{ parameters.shouldContinueOnError }}
- container: linux_bionic
- jobParameters:
- runtimeFlavor: mono
- # We build on Linux, but the test queue runs Windows, so
- # we need to override the test script generation
- runScriptWindowsCmd: true
- buildConfig: ${{ parameters.buildConfig }}
- helixQueueGroup: ${{ parameters.helixQueueGroup }}
- ${{ insert }}: ${{ parameters.jobParameters }}
-
# Linux Bionic arm64
- ${{ if containsValue(parameters.platforms, 'linux_bionic_arm64') }}:
diff --git a/eng/pipelines/common/templates/runtimes/run-test-job.yml b/eng/pipelines/common/templates/runtimes/run-test-job.yml
index a063c2127ecc71..a2e13dca489deb 100644
--- a/eng/pipelines/common/templates/runtimes/run-test-job.yml
+++ b/eng/pipelines/common/templates/runtimes/run-test-job.yml
@@ -389,7 +389,7 @@ jobs:
- jitstress1_tiered
- jitstress2
- jitstress2_tiered
- - disabler2r
+ - zapdisable
- tailcallstress
${{ if in(parameters.testGroup, 'jitstress-random') }}:
scenarios:
@@ -507,9 +507,9 @@ jobs:
${{ if in(parameters.testGroup, 'gcstress-extra') }}:
scenarios:
- heapverify1
- - gcstress0xc_disabler2r
- - gcstress0xc_disabler2r_jitstress2
- - gcstress0xc_disabler2r_heapverify1
+ - gcstress0xc_zapdisable
+ - gcstress0xc_zapdisable_jitstress2
+ - gcstress0xc_zapdisable_heapverify1
- gcstress0xc_jitstress1
- gcstress0xc_jitstress2
- gcstress0xc_tailcallstress
@@ -585,7 +585,7 @@ jobs:
- jitobjectstackallocation
- jitphysicalpromotion_only
- jitphysicalpromotion_full
- - jitrlcse
+ - jitcrossblocklocalassertionprop
${{ if in(parameters.testGroup, 'jit-cfg') }}:
scenarios:
- jitcfg
diff --git a/eng/pipelines/coreclr/perf-non-wasm-jobs.yml b/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
index c72da916d6f094..79dce1867bf695 100644
--- a/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
+++ b/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
@@ -281,24 +281,6 @@ jobs:
logicalmachine: 'perfowl'
experimentName: 'gdv3'
- # run coreclr perfowl microbenchmarks perf rlcse jobs
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/coreclr/templates/perf-job.yml
- buildConfig: release
- runtimeFlavor: coreclr
- platforms:
- - linux_x64
- - windows_x64
- jobParameters:
- testGroup: perf
- liveLibrariesBuildConfig: Release
- projectFile: microbenchmarks.proj
- runKind: micro
- runJobTemplate: /eng/pipelines/coreclr/templates/run-performance-job.yml
- logicalmachine: 'perfowl'
- experimentName: 'rlcse'
-
# run coreclr crossgen perf job
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -334,29 +316,30 @@ jobs:
parameters:
name: MonoRuntimePacks
- # build PerfBDN app
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/common/global-build-job.yml
- buildConfig: release
- runtimeFlavor: mono
- platforms:
- - ios_arm64
- jobParameters:
- dependsOn:
- - Build_android_arm64_release_Mono_Packs
- buildArgs: -s mono -c $(_BuildConfig)
- nameSuffix: PerfBDNApp
- isOfficialBuild: false
- pool:
- vmImage: 'macos-12'
- postBuildSteps:
- - template: /eng/pipelines/coreclr/templates/build-perf-bdn-app.yml
- parameters:
- rootFolder: '$(Build.SourcesDirectory)/artifacts/'
- includeRootFolder: true
- displayName: Android BDN App Artifacts
- artifactName: PerfBDNAppArm
- archiveExtension: '.tar.gz'
- archiveType: tar
- tarCompression: gz
+ # Disabled due to: https://github.com/dotnet/performance/issues/3655
+ # # build PerfBDN app
+ # - template: /eng/pipelines/common/platform-matrix.yml
+ # parameters:
+ # jobTemplate: /eng/pipelines/common/global-build-job.yml
+ # buildConfig: release
+ # runtimeFlavor: mono
+ # platforms:
+ # - ios_arm64
+ # jobParameters:
+ # dependsOn:
+ # - Build_android_arm64_release_Mono_Packs
+ # buildArgs: -s mono -c $(_BuildConfig)
+ # nameSuffix: PerfBDNApp
+ # isOfficialBuild: false
+ # pool:
+ # vmImage: 'macos-12'
+ # postBuildSteps:
+ # - template: /eng/pipelines/coreclr/templates/build-perf-bdn-app.yml
+ # parameters:
+ # rootFolder: '$(Build.SourcesDirectory)/artifacts/'
+ # includeRootFolder: true
+ # displayName: Android BDN App Artifacts
+ # artifactName: PerfBDNAppArm
+ # archiveExtension: '.tar.gz'
+ # archiveType: tar
+ # tarCompression: gz
diff --git a/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml b/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml
index de23519f9c62ed..164485e7d00770 100644
--- a/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml
+++ b/eng/pipelines/coreclr/runtime-nativeaot-outerloop.yml
@@ -60,17 +60,14 @@ extends:
- osx_x64
- osx_arm64
- linux_x64
- - linux_arm
- linux_arm64
- linux_musl_x64
- - linux_musl_arm64
jobParameters:
testGroup: innerloop
isSingleFile: true
nameSuffix: NativeAOT_Libs
- buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) /p:TestNativeAot=true /p:ArchiveTests=true /p:IlcUseServerGc=false /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) /p:TestNativeAot=true /p:ArchiveTests=true /p:IlcUseServerGc=false
timeoutInMinutes: 300 # doesn't normally take this long, but I've seen Helix queues backed up for 160 minutes
- includeAllPlatforms: true
# extra steps, run tests
postBuildSteps:
- template: /eng/pipelines/libraries/helix.yml
@@ -94,7 +91,7 @@ extends:
testGroup: innerloop
isSingleFile: true
nameSuffix: NativeAOT_Checked_Libs
- buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:IlcUseServerGc=false /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:IlcUseServerGc=false
timeoutInMinutes: 360
# extra steps, run tests
postBuildSteps:
@@ -119,7 +116,7 @@ extends:
testGroup: innerloop
isSingleFile: true
nameSuffix: NativeAOT_Checked_Libs_SizeOpt
- buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:OptimizationPreference=Size /p:IlcUseServerGc=false /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:OptimizationPreference=Size /p:IlcUseServerGc=false
timeoutInMinutes: 240
# extra steps, run tests
postBuildSteps:
@@ -144,7 +141,7 @@ extends:
testGroup: innerloop
isSingleFile: true
nameSuffix: NativeAOT_Checked_Libs_SpeedOpt
- buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:OptimizationPreference=Speed /p:IlcUseServerGc=false /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) -rc Checked /p:TestNativeAot=true /p:ArchiveTests=true /p:OptimizationPreference=Speed /p:IlcUseServerGc=false
timeoutInMinutes: 240
# extra steps, run tests
postBuildSteps:
@@ -165,7 +162,6 @@ extends:
platforms:
- windows_x64
- linux_x64
- - linux_arm
variables:
- name: timeoutPerTestInMinutes
value: 60
@@ -174,7 +170,7 @@ extends:
jobParameters:
timeoutInMinutes: 240
nameSuffix: NativeAOT_Pri0
- buildArgs: -s clr.aot+host.native+libs -rc $(_BuildConfig) -lc Release -hc Release /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+host.native+libs -rc $(_BuildConfig) -lc Release -hc Release
postBuildSteps:
- template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
parameters:
diff --git a/eng/pipelines/coreclr/templates/build-perf-bdn-app.yml b/eng/pipelines/coreclr/templates/build-perf-bdn-app.yml
index 02c96372885772..ddcea7b914715c 100644
--- a/eng/pipelines/coreclr/templates/build-perf-bdn-app.yml
+++ b/eng/pipelines/coreclr/templates/build-perf-bdn-app.yml
@@ -16,7 +16,7 @@ parameters:
archiveExtension: ''
archiveType: ''
tarCompression: ''
- framework: 'net9.0' # Framework version to get versions for and build for
+ framework: 'net8.0' # Framework version to get versions for and build for
perfRepo: 'main' # Perf repo to pull for the PerfLabExporter
@@ -61,13 +61,13 @@ steps:
echo '{ }' > ./global.json
curl -o NuGet.config 'https://raw.githubusercontent.com/dotnet/maui/${{parameters.framework}}/NuGet.config'
curl -o dotnet-install.sh 'https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh'
- curl -Lo maui-supported-sdk-version.json 'https://maui.blob.core.windows.net/metadata/sdks/${{parameters.framework}}.json'
+ curl -Lo maui-supported-sdk-version.json 'https://aka.ms/dotnet/sdk/maui/${{parameters.framework}}.json'
version=$(sed -nr 's/\s*"version": "(.*)"/\1/p' ./maui-supported-sdk-version.json)
chmod -R a+rx .
./dotnet-install.sh --version $version --install-dir .
./dotnet --info
- ./dotnet workload install maui --from-rollback-file https://maui.blob.core.windows.net/metadata/rollbacks/${{parameters.framework}}.json --configfile NuGet.config
- ./dotnet workload install android --from-rollback-file https://maui.blob.core.windows.net/metadata/rollbacks/${{parameters.framework}}.json --configfile NuGet.config
+ ./dotnet workload install maui --from-rollback-file https://aka.ms/dotnet/maui/${{parameters.framework}}.json --configfile NuGet.config
+ ./dotnet workload install android --from-rollback-file https://aka.ms/dotnet/maui/${{parameters.framework}}.json --configfile NuGet.config
displayName: Install MAUI workload
workingDirectory: $(Build.SourcesDirectory)
@@ -147,7 +147,7 @@ steps:
# Remove the embed assemblies from source
- script: |
- ../dotnet build ./src/Core/tests/Benchmarks.Droid/Benchmarks.Droid.csproj --configuration Release -bl:BenchmarksDroid.binlog /p:TF_Build=False /p:ForceNet8Current=true
+ ../dotnet build ./src/Core/tests/Benchmarks.Droid/Benchmarks.Droid.csproj --configuration Release -bl:BenchmarksDroid.binlog /p:TF_Build=False
mv ./src/Core/tests/Benchmarks.Droid/bin/Release/${{parameters.framework}}-android/android-arm64/com.microsoft.maui.benchmarks-Signed.apk ./MonoBenchmarksDroid.apk
displayName: Build BDN Android App
workingDirectory: $(Build.SourcesDirectory)/maui
diff --git a/eng/pipelines/coreclr/templates/helix-queues-setup.yml b/eng/pipelines/coreclr/templates/helix-queues-setup.yml
index 93391941114482..7b4ce6c6c7f431 100644
--- a/eng/pipelines/coreclr/templates/helix-queues-setup.yml
+++ b/eng/pipelines/coreclr/templates/helix-queues-setup.yml
@@ -86,9 +86,9 @@ jobs:
# Linux musl arm32
- ${{ if eq(parameters.platform, 'linux_musl_arm') }}:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- - (Alpine.316.Arm32.Open)Ubuntu.2004.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.17-helix-arm32v7
+ - (Alpine.316.Arm32.Open)Ubuntu.2004.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.16-helix-arm32v7
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- - (Alpine.316.Arm32)Ubuntu.2004.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.17-helix-arm32v7
+ - (Alpine.316.Arm32)Ubuntu.2004.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.16-helix-arm32v7
# Linux musl arm64
- ${{ if eq(parameters.platform, 'linux_musl_arm64') }}:
diff --git a/eng/pipelines/coreclr/templates/perf-job.yml b/eng/pipelines/coreclr/templates/perf-job.yml
index 450cd2799fa0ce..7adba086e45c11 100644
--- a/eng/pipelines/coreclr/templates/perf-job.yml
+++ b/eng/pipelines/coreclr/templates/perf-job.yml
@@ -90,7 +90,7 @@ jobs:
- ${{ format('build_{0}{1}_{2}_{3}_{4}', parameters.osGroup, parameters.osSubgroup, parameters.archType, parameters.buildConfig, parameters.codeGenType) }}
- ${{ if eq(parameters.runtimeType, 'AndroidMono')}}:
- ${{ 'build_android_arm64_release_AndroidMono' }}
- - ${{ 'Build_ios_arm64_release_PerfBDNApp' }}
+ # - ${{ 'Build_ios_arm64_release_PerfBDNApp' }} Disabled per: https://github.com/dotnet/performance/issues/3655
- ${{ if eq(parameters.runtimeType, 'iOSMono')}}:
- ${{ 'build_ios_arm64_release_iOSMono' }}
- ${{ if eq(parameters.runtimeType, 'iOSNativeAOT')}}:
@@ -228,13 +228,14 @@ jobs:
artifactFileName: 'AndroidMonoarm64.tar.gz'
artifactName: 'AndroidMonoarm64'
displayName: 'Mono Android HelloWorld'
- - template: /eng/pipelines/common/download-artifact-step.yml
- parameters:
- unpackFolder: $(Build.SourcesDirectory)
- cleanUnpackFolder: false
- artifactFileName: 'AndroidBDNApk.tar.gz'
- artifactName: 'AndroidBDNApk'
- displayName: 'Mono Android BDN Apk'
+ # Disabled per: https://github.com/dotnet/performance/issues/3655
+ # - template: /eng/pipelines/common/download-artifact-step.yml
+ # parameters:
+ # unpackFolder: $(Build.SourcesDirectory)
+ # cleanUnpackFolder: false
+ # artifactFileName: 'AndroidBDNApk.tar.gz'
+ # artifactName: 'AndroidBDNApk'
+ # displayName: 'Mono Android BDN Apk'
# Download iOSMono and Native AOT tests
- ${{ if or(eq(parameters.runtimeType, 'iOSMono'), eq(parameters.runtimeType, 'iOSNativeAOT')) }}:
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-wasm.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-wasm.yml
index fc8d757233cd45..31d15946c50da6 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-wasm.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-wasm.yml
@@ -290,18 +290,17 @@ jobs:
# ff tests are unstable currently
shouldContinueOnError: true
- # Active Issue https://github.com/dotnet/runtime/issues/98771
- # - template: /eng/pipelines/common/templates/wasm-debugger-tests.yml
- # parameters:
- # platforms:
- # - Browser_wasm
- # - Browser_wasm_win
- # extraBuildArgs: /p:WasmEnableThreads=true /p:AotHostArchitecture=x64 /p:AotHostOS=$(_hostedOS)
- # nameSuffix: DebuggerTests_MultiThreaded
- # alwaysRun: ${{ parameters.isWasmOnlyBuild }}
- # isExtraPlatformsBuild: ${{ parameters.isExtraPlatformsBuild }}
- # isWasmOnlyBuild: ${{ parameters.isWasmOnlyBuild }}
- # runOnlyOnWasmOnlyPipelines: true
+ - template: /eng/pipelines/common/templates/wasm-debugger-tests.yml
+ parameters:
+ platforms:
+ - Browser_wasm
+ - Browser_wasm_win
+ extraBuildArgs: /p:WasmEnableThreads=true /p:AotHostArchitecture=x64 /p:AotHostOS=$(_hostedOS)
+ nameSuffix: DebuggerTests_MultiThreaded
+ alwaysRun: ${{ parameters.isWasmOnlyBuild }}
+ isExtraPlatformsBuild: ${{ parameters.isExtraPlatformsBuild }}
+ isWasmOnlyBuild: ${{ parameters.isWasmOnlyBuild }}
+ runOnlyOnWasmOnlyPipelines: true
# Disable for now
#- template: /eng/pipelines/coreclr/perf-wasm-jobs.yml
diff --git a/eng/pipelines/libraries/helix-queues-setup.yml b/eng/pipelines/libraries/helix-queues-setup.yml
index 6baf34455ee792..d6f52839e600ac 100644
--- a/eng/pipelines/libraries/helix-queues-setup.yml
+++ b/eng/pipelines/libraries/helix-queues-setup.yml
@@ -96,7 +96,7 @@ jobs:
# Android
- ${{ if in(parameters.platform, 'android_x86', 'android_x64', 'linux_bionic_x64') }}:
- Ubuntu.2204.Amd64.Android.29.Open
- - ${{ if in(parameters.platform, 'android_arm', 'android_arm64', 'linux_bionic_arm', 'linux_bionic_arm64') }}:
+ - ${{ if in(parameters.platform, 'android_arm', 'android_arm64', 'linux_bionic_arm64') }}:
- Windows.11.Amd64.Android.Open
# iOS Simulator/Mac Catalyst arm64
diff --git a/eng/pipelines/libraries/run-test-job.yml b/eng/pipelines/libraries/run-test-job.yml
index f6f452199d627b..5c68b4377ee143 100644
--- a/eng/pipelines/libraries/run-test-job.yml
+++ b/eng/pipelines/libraries/run-test-job.yml
@@ -183,7 +183,7 @@ jobs:
- jitstress1_tiered
- jitstress2
- jitstress2_tiered
- - disabler2r
+ - zapdisable
- tailcallstress
${{ if in(parameters.coreclrTestGroup, 'jitstress-random') }}:
scenarios:
@@ -220,9 +220,9 @@ jobs:
${{ if in(parameters.coreclrTestGroup, 'gcstress-extra') }}:
scenarios:
- heapverify1
- - gcstress0xc_disabler2r
- - gcstress0xc_disabler2r_jitstress2
- - gcstress0xc_disabler2r_heapverify1
+ - gcstress0xc_zapdisable
+ - gcstress0xc_zapdisable_jitstress2
+ - gcstress0xc_zapdisable_heapverify1
- gcstress0xc_jitstress1
- gcstress0xc_jitstress2
- gcstress0xc_jitminopts_heapverify1
@@ -242,7 +242,7 @@ jobs:
- jitosr_stress_random
- syntheticpgo
- syntheticpgo_blend
- - jitrlcse
+ - jitcrossblocklocalassertionprop
- ${{ if eq(parameters.SuperPmiCollect, true) }}:
- template: /eng/pipelines/libraries/superpmi-postprocess-step.yml
diff --git a/eng/pipelines/runtime-official.yml b/eng/pipelines/runtime-official.yml
index 0cc897455e2e06..ba4964eca0eb2c 100644
--- a/eng/pipelines/runtime-official.yml
+++ b/eng/pipelines/runtime-official.yml
@@ -316,7 +316,6 @@ extends:
- linux_musl_arm
- linux_musl_arm64
- linux_bionic_x64
- - linux_bionic_arm
- linux_bionic_arm64
- windows_x64
- windows_arm64
diff --git a/eng/pipelines/runtime.yml b/eng/pipelines/runtime.yml
index d7a1f1847eb11a..be2870611e617b 100644
--- a/eng/pipelines/runtime.yml
+++ b/eng/pipelines/runtime.yml
@@ -254,7 +254,7 @@ extends:
jobParameters:
timeoutInMinutes: 120
nameSuffix: NativeAOT
- buildArgs: -s clr.aot+host.native+libs -rc $(_BuildConfig) -lc Release -hc Release /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+host.native+libs -rc $(_BuildConfig) -lc Release -hc Release
postBuildSteps:
- template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
parameters:
@@ -293,7 +293,7 @@ extends:
jobParameters:
timeoutInMinutes: 180
nameSuffix: NativeAOT
- buildArgs: -s clr.aot+host.native+libs.native+libs.sfx -rc $(_BuildConfig) -lc Release -hc Release /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+host.native+libs.native+libs.sfx -rc $(_BuildConfig) -lc Release -hc Release
postBuildSteps:
- template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
parameters:
@@ -338,7 +338,7 @@ extends:
testGroup: innerloop
timeoutInMinutes: 120
nameSuffix: NativeAOT
- buildArgs: -s clr.aot+host.native+libs+tools.illink -c $(_BuildConfig) -rc $(_BuildConfig) -lc Release -hc Release /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+host.native+libs+tools.illink -c $(_BuildConfig) -rc $(_BuildConfig) -lc Release -hc Release
postBuildSteps:
- template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
parameters:
@@ -375,7 +375,7 @@ extends:
testGroup: innerloop
isSingleFile: true
nameSuffix: NativeAOT_Libraries
- buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) /p:TestNativeAot=true /p:RunSmokeTestsOnly=true /p:ArchiveTests=true /p:RunAnalyzers=false
+ buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) /p:TestNativeAot=true /p:RunSmokeTestsOnly=true /p:ArchiveTests=true
timeoutInMinutes: 240 # Doesn't actually take long, but we've seen the ARM64 Helix queue often get backlogged for 2+ hours
# extra steps, run tests
postBuildSteps:
diff --git a/eng/targetingpacks.targets b/eng/targetingpacks.targets
index 8a6a8b96a2f56c..befd249f231d2a 100644
--- a/eng/targetingpacks.targets
+++ b/eng/targetingpacks.targets
@@ -43,7 +43,7 @@
LatestRuntimeFrameworkVersion="$(ProductVersion)"
RuntimeFrameworkName="$(LocalFrameworkOverrideName)"
RuntimePackNamePatterns="$(LocalFrameworkOverrideName).Runtime.**RID**"
- RuntimePackRuntimeIdentifiers="linux-arm;linux-arm64;linux-musl-arm64;linux-musl-x64;linux-x64;osx-x64;rhel.6-x64;tizen.4.0.0-armel;tizen.5.0.0-armel;win-arm64;win-x64;win-x86;linux-musl-arm;osx-arm64;maccatalyst-x64;maccatalyst-arm64;linux-s390x;linux-bionic-arm;linux-bionic-arm64;linux-bionic-x64;linux-bionic-x86;freebsd-x64;freebsd-arm64;linux-ppc64le;linux-riscv64;linux-musl-riscv64"
+ RuntimePackRuntimeIdentifiers="linux-arm;linux-arm64;linux-musl-arm64;linux-musl-x64;linux-x64;osx-x64;rhel.6-x64;tizen.4.0.0-armel;tizen.5.0.0-armel;win-arm64;win-x64;win-x86;linux-musl-arm;osx-arm64;maccatalyst-x64;maccatalyst-arm64;linux-s390x;linux-bionic-arm;linux-bionic-arm64;linux-bionic-x64;linux-bionic-x86;freebsd-x64;freebsd-arm64;linux-ppc64le"
TargetFramework="$(NetCoreAppCurrent)"
TargetingPackName="$(LocalFrameworkOverrideName).Ref"
TargetingPackVersion="$(ProductVersion)"
@@ -53,7 +53,7 @@
RuntimeFrameworkName="$(LocalFrameworkOverrideName)"
LatestRuntimeFrameworkVersion="$(ProductVersion)"
RuntimePackNamePatterns="$(LocalFrameworkOverrideName).Runtime.Mono.**RID**"
- RuntimePackRuntimeIdentifiers="linux-arm;linux-arm64;linux-musl-arm64;linux-musl-x64;linux-x64;osx-x64;linux-riscv64;linux-musl-riscv64;rhel.6-x64;win-arm64;win-x64;win-x86;linux-musl-arm;osx-arm64;linux-s390x;linux-bionic-arm;linux-bionic-arm64;linux-bionic-x64;linux-bionic-x86;browser-wasm;ios-arm64;ios-arm;iossimulator-arm64;iossimulator-x64;iossimulator-x86;tvos-arm64;tvossimulator-arm64;tvossimulator-x64;maccatalyst-x64;maccatalyst-arm64;android-arm64;android-arm;android-x64;android-x86"
+ RuntimePackRuntimeIdentifiers="linux-arm;linux-arm64;linux-musl-arm64;linux-musl-x64;linux-x64;osx-x64;rhel.6-x64;win-arm64;win-x64;win-x86;linux-musl-arm;osx-arm64;linux-s390x;linux-bionic-arm;linux-bionic-arm64;linux-bionic-x64;linux-bionic-x86;browser-wasm;ios-arm64;ios-arm;iossimulator-arm64;iossimulator-x64;iossimulator-x86;tvos-arm64;tvossimulator-arm64;tvossimulator-x64;maccatalyst-x64;maccatalyst-arm64;android-arm64;android-arm;android-x64;android-x86"
RuntimePackLabels="Mono"
Condition="'$(UseLocalTargetingRuntimePack)' == 'true' and ('@(KnownRuntimePack)' == '' or @(KnownRuntimePack->WithMetadataValue('Identity', 'Microsoft.NETCore.App')->WithMetadataValue('RuntimePackLabels', 'Mono')->WithMetadataValue('TargetFramework', '$(NetCoreAppCurrent)')) == '')" />
@@ -78,7 +78,7 @@
TargetFramework="$(NetCoreAppCurrent)"
Crossgen2PackNamePattern="$(LocalFrameworkOverrideName).Crossgen2.**RID**"
Crossgen2PackVersion="$(ProductVersion)"
- Crossgen2RuntimeIdentifiers="linux-musl-x64;linux-x64;win-x64;linux-arm;linux-arm64;linux-musl-arm;linux-musl-arm64;osx-arm64;osx-x64;win-arm64;win-x86;linux-riscv64;linux-musl-riscv64"
+ Crossgen2RuntimeIdentifiers="linux-musl-x64;linux-x64;win-x64;linux-arm;linux-arm64;linux-musl-arm;linux-musl-arm64;osx-arm64;osx-x64;win-arm64;win-x86"
Condition="'$(UseLocalCrossgen2Pack)' == 'true' and '@(KnownCrossgen2Pack->AnyHaveMetadataValue('TargetFramework', '$(NetCoreAppCurrent)'))' != 'true'" />
- 122.0.6261.69
- 1250580
- https://storage.googleapis.com/chromium-browser-snapshots/Linux_x64/1250580
- 12.2.281
- 122.0.6261.69
- 1250580
- https://storage.googleapis.com/chromium-browser-snapshots/Win_x64/1250586
- 12.2.281
+ 121.0.6167.184
+ 1233107
+ https://storage.googleapis.com/chromium-browser-snapshots/Linux_x64/1233114
+ 12.1.285
+ 121.0.6167.185
+ 1233107
+ https://storage.googleapis.com/chromium-browser-snapshots/Win_x64/1233136
+ 12.1.285
\ No newline at end of file
diff --git a/eng/testing/WasmRunnerTemplate.cmd b/eng/testing/WasmRunnerTemplate.cmd
index f92cee17cc9df7..83aeb53cad03a6 100644
--- a/eng/testing/WasmRunnerTemplate.cmd
+++ b/eng/testing/WasmRunnerTemplate.cmd
@@ -59,9 +59,6 @@ if /I [%XHARNESS_COMMAND%] == [test] (
if [%BROWSER_PATH%] == [] if not [%HELIX_CORRELATION_PAYLOAD%] == [] (
set "BROWSER_PATH=--browser-path^=%HELIX_CORRELATION_PAYLOAD%\chrome-win\chrome.exe"
)
- if [%JS_ENGINE_ARGS%] == [] (
- set "JS_ENGINE_ARGS=--browser-arg^=--js-flags^=--stack-trace-limit^=1000"
- )
)
if [%XHARNESS_ARGS%] == [] (
diff --git a/eng/testing/WasmRunnerTemplate.sh b/eng/testing/WasmRunnerTemplate.sh
index 4f5856546fc56b..71347666cde802 100644
--- a/eng/testing/WasmRunnerTemplate.sh
+++ b/eng/testing/WasmRunnerTemplate.sh
@@ -58,10 +58,6 @@ if [[ "$XHARNESS_COMMAND" == "test" ]]; then
fi
fi
fi
-else
- if [[ -z "$JS_ENGINE_ARGS" ]]; then
- JS_ENGINE_ARGS="--browser-arg=--js-flags=--stack-trace-limit=1000"
- fi
fi
if [[ -z "$XHARNESS_ARGS" ]]; then
diff --git a/eng/testing/linker/project.csproj.template b/eng/testing/linker/project.csproj.template
index d31c8df9c9271e..41466e8d4492af 100644
--- a/eng/testing/linker/project.csproj.template
+++ b/eng/testing/linker/project.csproj.template
@@ -75,12 +75,6 @@
{AdditionalProjectReferences}
-
-
- <_BoolPropertiesThatTriggerRelinking Remove="InvariantGlobalization" />
-
-
-
diff --git a/eng/testing/outerBuild.targets b/eng/testing/outerBuild.targets
index 6465a272bca8b0..c071944c21d93d 100644
--- a/eng/testing/outerBuild.targets
+++ b/eng/testing/outerBuild.targets
@@ -1,19 +1,12 @@
-
+
-
-
+
-
-
-
- false
-
-
\ No newline at end of file
diff --git a/eng/testing/performance/android_scenarios.proj b/eng/testing/performance/android_scenarios.proj
index 4d0aad300cd99d..c2f3e7b1955e26 100644
--- a/eng/testing/performance/android_scenarios.proj
+++ b/eng/testing/performance/android_scenarios.proj
@@ -35,24 +35,24 @@
$(Python) test.py sod --scenario-name "%(Identity)"
$(Python) post.py
-
+
+
diff --git a/eng/testing/performance/performance-setup.ps1 b/eng/testing/performance/performance-setup.ps1
index 08225ae75004d2..f7d321930627a0 100644
--- a/eng/testing/performance/performance-setup.ps1
+++ b/eng/testing/performance/performance-setup.ps1
@@ -140,7 +140,7 @@ if ($NoR2R) {
}
if ($ExperimentName) {
- $SetupArguments = "$SetupArguments --experiment-name $ExperimentName"
+ $SetupArguments = "$SetupArguments --experiment-name '$ExperimentName'"
}
if ($UseLocalCommitTime) {
diff --git a/eng/testing/performance/performance-setup.sh b/eng/testing/performance/performance-setup.sh
index 0a735a796a6e0a..6eeb7223ffb430 100755
--- a/eng/testing/performance/performance-setup.sh
+++ b/eng/testing/performance/performance-setup.sh
@@ -492,7 +492,7 @@ if [[ "$nor2r" == "true" ]]; then
fi
if [[ ! -z "$experimentname" ]]; then
- setup_arguments="$setup_arguments --experiment-name $experimentname"
+ setup_arguments="$setup_arguments --experiment-name '$experimentname'"
fi
if [[ "$monoaot" == "true" ]]; then
diff --git a/eng/testing/scenarios/BuildWasmAppsJobsList.txt b/eng/testing/scenarios/BuildWasmAppsJobsList.txt
index 5ccb34b25e18ac..5519c03e6ef7db 100644
--- a/eng/testing/scenarios/BuildWasmAppsJobsList.txt
+++ b/eng/testing/scenarios/BuildWasmAppsJobsList.txt
@@ -36,13 +36,12 @@ Wasm.Build.Tests.TestAppScenarios.AppSettingsTests
Wasm.Build.Tests.TestAppScenarios.LazyLoadingTests
Wasm.Build.Tests.TestAppScenarios.LibraryInitializerTests
Wasm.Build.Tests.TestAppScenarios.SatelliteLoadingTests
-Wasm.Build.Tests.TestAppScenarios.DownloadResourceProgressTests
-Wasm.Build.Tests.TestAppScenarios.SignalRClientTests
Wasm.Build.Tests.WasmBuildAppTest
Wasm.Build.Tests.WasmNativeDefaultsTests
Wasm.Build.Tests.WasmRunOutOfAppBundleTests
Wasm.Build.Tests.WasmSIMDTests
Wasm.Build.Tests.WasmTemplateTests
Wasm.Build.Tests.WorkloadTests
+Wasm.Build.Tests.TestAppScenarios.DownloadResourceProgressTests
Wasm.Build.Tests.MT.Blazor.SimpleMultiThreadedTests
Wasm.Build.Tests.TestAppScenarios.DebugLevelTests
diff --git a/eng/testing/tests.browser.targets b/eng/testing/tests.browser.targets
index bce044984e9379..d27fa412d490b1 100644
--- a/eng/testing/tests.browser.targets
+++ b/eng/testing/tests.browser.targets
@@ -87,8 +87,8 @@
<_AppArgs Condition="'$(IsFunctionalTest)' != 'true' and '$(WasmMainAssemblyFileName)' != ''">--run $(WasmMainAssemblyFileName)
<_AppArgs Condition="'$(IsFunctionalTest)' == 'true'">--run $(AssemblyName).dll
+ <_XUnitBackgroundExec Condition="'$(_XUnitBackgroundExec)' == '' and '$(WasmEnableThreads)' == 'true'">true
$(WasmTestAppArgs) -backgroundExec
- $(WasmXHarnessMonoArgs) --setenv=IsWasmBackgroundExec=true
<_AppArgs Condition="'$(WasmTestAppArgs)' != ''">$(_AppArgs) $(WasmTestAppArgs)
$(WasmXHarnessMonoArgs) --setenv=XHARNESS_LOG_TEST_START=true
diff --git a/global.json b/global.json
index cd39f3cc389a3c..dc5aff95f8db27 100644
--- a/global.json
+++ b/global.json
@@ -1,18 +1,18 @@
{
"sdk": {
- "version": "9.0.100-preview.1.24101.2",
+ "version": "9.0.100-alpha.1.23615.4",
"allowPrerelease": true,
"rollForward": "major"
},
"tools": {
- "dotnet": "9.0.100-preview.1.24101.2"
+ "dotnet": "9.0.100-alpha.1.23615.4"
},
"msbuild-sdks": {
- "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24161.5",
- "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.24161.5",
- "Microsoft.DotNet.SharedFramework.Sdk": "9.0.0-beta.24161.5",
+ "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24112.1",
+ "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.24112.1",
+ "Microsoft.DotNet.SharedFramework.Sdk": "9.0.0-beta.24112.1",
"Microsoft.Build.NoTargets": "3.7.0",
"Microsoft.Build.Traversal": "3.4.0",
- "Microsoft.NET.Sdk.IL": "9.0.0-preview.3.24161.1"
+ "Microsoft.NET.Sdk.IL": "9.0.0-preview.2.24115.1"
}
}
diff --git a/src/coreclr/CMakeLists.txt b/src/coreclr/CMakeLists.txt
index 4aa45914ff54b0..1c314d9bf624e0 100644
--- a/src/coreclr/CMakeLists.txt
+++ b/src/coreclr/CMakeLists.txt
@@ -147,7 +147,7 @@ add_subdirectory(tools/aot/jitinterface)
if(NOT CLR_CROSS_COMPONENTS_BUILD)
# NativeAOT only buildable for a subset of CoreCLR-supported configurations
- if(CLR_CMAKE_HOST_ARCH_ARM64 OR CLR_CMAKE_HOST_ARCH_AMD64 OR CLR_CMAKE_HOST_ARCH_ARM OR (CLR_CMAKE_HOST_ARCH_I386 AND CLR_CMAKE_HOST_WIN32))
+ if(CLR_CMAKE_HOST_ARCH_ARM64 OR CLR_CMAKE_HOST_ARCH_AMD64 OR CLR_CMAKE_HOST_ARCH_ARM)
add_subdirectory(nativeaot)
endif()
endif(NOT CLR_CROSS_COMPONENTS_BUILD)
diff --git a/src/coreclr/System.Private.CoreLib/System.Private.CoreLib.csproj b/src/coreclr/System.Private.CoreLib/System.Private.CoreLib.csproj
index 9ef1024c449da8..6b3ddff0cc868c 100644
--- a/src/coreclr/System.Private.CoreLib/System.Private.CoreLib.csproj
+++ b/src/coreclr/System.Private.CoreLib/System.Private.CoreLib.csproj
@@ -1,4 +1,4 @@
-
+
false
@@ -46,7 +46,8 @@
$(ProductVersion)
$(ProductVersion)
- $(NoWarn),0419,0649
+
+ $(NoWarn),0419,0649;AD0001
enable
@@ -136,6 +137,8 @@
+
+
@@ -200,17 +203,18 @@
+
-
-
-
-
-
-
+
+
+
+
+
+
@@ -222,10 +226,12 @@
+
+
diff --git a/src/coreclr/System.Private.CoreLib/src/ILLink/ILLink.Substitutions.xml b/src/coreclr/System.Private.CoreLib/src/ILLink/ILLink.Substitutions.xml
index d7fd368adac7c2..fb07d67f37c68e 100644
--- a/src/coreclr/System.Private.CoreLib/src/ILLink/ILLink.Substitutions.xml
+++ b/src/coreclr/System.Private.CoreLib/src/ILLink/ILLink.Substitutions.xml
@@ -2,6 +2,11 @@
+
+
+
+
+
diff --git a/src/coreclr/System.Private.CoreLib/src/Internal/Runtime/InteropServices/ComActivator.cs b/src/coreclr/System.Private.CoreLib/src/Internal/Runtime/InteropServices/ComActivator.cs
index 77f64abd1b42c0..db8d4ead4659b9 100644
--- a/src/coreclr/System.Private.CoreLib/src/Internal/Runtime/InteropServices/ComActivator.cs
+++ b/src/coreclr/System.Private.CoreLib/src/Internal/Runtime/InteropServices/ComActivator.cs
@@ -203,6 +203,7 @@ private static void ClassRegistrationScenarioForType(ComActivationContext cxt, b
// Finally validate signature
ReadOnlySpan methParams = method.GetParametersAsSpan();
if (method.ReturnType != typeof(void)
+ || methParams == null
|| methParams.Length != 1
|| (methParams[0].ParameterType != typeof(string) && methParams[0].ParameterType != typeof(Type)))
{
diff --git a/src/coreclr/System.Private.CoreLib/src/System/ArgIterator.cs b/src/coreclr/System.Private.CoreLib/src/System/ArgIterator.cs
index d49d1dcb270b56..e7c2a99eeefa28 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/ArgIterator.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/ArgIterator.cs
@@ -24,7 +24,7 @@ private struct SigPointer
private int _remainingArgs; // # of remaining args.
#if TARGET_WINDOWS // Native Varargs are not supported on Unix
- // ArgIterator is a ref struct. It does not require pinning, therefore Unsafe.AsPointer is safe.
+ // ArgIterator is a ref struct. It does not require pinning.
// This method null checks the this pointer as a side-effect.
private ArgIterator* ThisPtr => (ArgIterator*)Unsafe.AsPointer(ref _argCookie);
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Array.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Array.CoreCLR.cs
index de7b3021c458fe..16d9067567ee58 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Array.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Array.CoreCLR.cs
@@ -74,7 +74,7 @@ private static unsafe void CopyImpl(Array sourceArray, int sourceIndex, Array de
if (pMT->ContainsGCPointers)
Buffer.BulkMoveWithWriteBarrier(ref dst, ref src, byteCount);
else
- SpanHelpers.Memmove(ref dst, ref src, byteCount);
+ Buffer.Memmove(ref dst, ref src, byteCount);
// GC.KeepAlive(sourceArray) not required. pMT kept alive via sourceArray
return;
@@ -184,7 +184,7 @@ private static unsafe void CopyImplUnBoxEachElement(Array sourceArray, int sourc
}
else
{
- SpanHelpers.Memmove(ref dest, ref obj.GetRawData(), destSize);
+ Buffer.Memmove(ref dest, ref obj.GetRawData(), destSize);
}
}
}
diff --git a/src/libraries/System.Private.CoreLib/src/System/Diagnostics/Tracing/EventPipe.Internal.cs b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/Eventing/EventPipe.CoreCLR.cs
similarity index 98%
rename from src/libraries/System.Private.CoreLib/src/System/Diagnostics/Tracing/EventPipe.Internal.cs
rename to src/coreclr/System.Private.CoreLib/src/System/Diagnostics/Eventing/EventPipe.CoreCLR.cs
index 6039bcfaa4f7a0..db76c99413e6bb 100644
--- a/src/libraries/System.Private.CoreLib/src/System/Diagnostics/Tracing/EventPipe.Internal.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/Eventing/EventPipe.CoreCLR.cs
@@ -5,6 +5,8 @@
using System.Runtime.InteropServices;
using System.Threading;
+#if FEATURE_PERFTRACING
+
namespace System.Diagnostics.Tracing
{
internal static partial class EventPipeInternal
@@ -66,3 +68,5 @@ internal static unsafe partial IntPtr CreateProvider(string providerName,
internal static unsafe partial bool WaitForSessionSignal(ulong sessionID, int timeoutMs);
}
}
+
+#endif // FEATURE_PERFTRACING
diff --git a/src/libraries/System.Private.CoreLib/src/System/Diagnostics/Tracing/NativeRuntimeEventSource.Threading.NativeSinks.Internal.cs b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/Eventing/NativeRuntimeEventSource.Threading.NativeSinks.CoreCLR.cs
similarity index 57%
rename from src/libraries/System.Private.CoreLib/src/System/Diagnostics/Tracing/NativeRuntimeEventSource.Threading.NativeSinks.Internal.cs
rename to src/coreclr/System.Private.CoreLib/src/System/Diagnostics/Eventing/NativeRuntimeEventSource.Threading.NativeSinks.CoreCLR.cs
index 7e9368dd3e929d..95942b6291c36a 100644
--- a/src/libraries/System.Private.CoreLib/src/System/Diagnostics/Tracing/NativeRuntimeEventSource.Threading.NativeSinks.Internal.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Diagnostics/Eventing/NativeRuntimeEventSource.Threading.NativeSinks.CoreCLR.cs
@@ -10,23 +10,12 @@ namespace System.Diagnostics.Tracing
// It contains the runtime specific interop to native event sinks.
internal sealed partial class NativeRuntimeEventSource : EventSource
{
-#if NATIVEAOT
- // We don't have these keywords defined from the genRuntimeEventSources.py, so we need to manually define them here.
- public static partial class Keywords
- {
- public const EventKeywords ContentionKeyword = (EventKeywords)0x4000;
- public const EventKeywords ThreadingKeyword = (EventKeywords)0x10000;
- public const EventKeywords ThreadTransferKeyword = (EventKeywords)0x80000000;
- public const EventKeywords WaitHandleKeyword = (EventKeywords)0x40000000000;
- }
-#endif
-
- [NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogContentionLockCreated")]
+ [NonEvent]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogContentionLockCreated(nint LockID, nint AssociatedObjectID, ushort ClrInstanceID);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogContentionStart")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogContentionStart(
ContentionFlagsMap ContentionFlags,
ushort ClrInstanceID,
@@ -35,38 +24,38 @@ private static partial void LogContentionStart(
ulong LockOwnerThreadID);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogContentionStop")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogContentionStop(
ContentionFlagsMap ContentionFlags,
ushort ClrInstanceID,
double DurationNs);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogThreadPoolWorkerThreadStart")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogThreadPoolWorkerThreadStart(uint ActiveWorkerThreadCount, uint RetiredWorkerThreadCount, ushort ClrInstanceID);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogThreadPoolWorkerThreadStop")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogThreadPoolWorkerThreadStop(uint ActiveWorkerThreadCount, uint RetiredWorkerThreadCount, ushort ClrInstanceID);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogThreadPoolWorkerThreadWait")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogThreadPoolWorkerThreadWait(uint ActiveWorkerThreadCount, uint RetiredWorkerThreadCount, ushort ClrInstanceID);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogThreadPoolMinMaxThreads")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogThreadPoolMinMaxThreads(ushort MinWorkerThreads, ushort MaxWorkerThreads, ushort MinIOCompletionThreads, ushort MaxIOCompletionThreads, ushort ClrInstanceID);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogThreadPoolWorkerThreadAdjustmentSample")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogThreadPoolWorkerThreadAdjustmentSample(double Throughput, ushort ClrInstanceID);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogThreadPoolWorkerThreadAdjustmentAdjustment")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogThreadPoolWorkerThreadAdjustmentAdjustment(double AverageThroughput, uint NewWorkerThreadCount, ThreadAdjustmentReasonMap Reason, ushort ClrInstanceID);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogThreadPoolWorkerThreadAdjustmentStats")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogThreadPoolWorkerThreadAdjustmentStats(
double Duration,
double Throughput,
@@ -81,7 +70,7 @@ private static partial void LogThreadPoolWorkerThreadAdjustmentStats(
ushort ClrInstanceID);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogThreadPoolIOEnqueue")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogThreadPoolIOEnqueue(
IntPtr NativeOverlapped,
IntPtr Overlapped,
@@ -89,34 +78,37 @@ private static partial void LogThreadPoolIOEnqueue(
ushort ClrInstanceID);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogThreadPoolIODequeue")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogThreadPoolIODequeue(
IntPtr NativeOverlapped,
IntPtr Overlapped,
ushort ClrInstanceID);
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogThreadPoolWorkingThreadCount")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogThreadPoolWorkingThreadCount(
uint Count,
- ushort ClrInstanceID);
+ ushort ClrInstanceID
+ );
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogThreadPoolIOPack")]
+ [LibraryImport(RuntimeHelpers.QCall)]
private static partial void LogThreadPoolIOPack(
IntPtr NativeOverlapped,
IntPtr Overlapped,
ushort ClrInstanceID);
+#pragma warning disable IDE0060 // Remove unused parameter
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogWaitHandleWaitStart")]
- private static partial void LogWaitHandleWaitStart(
+ private static void LogWaitHandleWaitStart(
WaitHandleWaitSourceMap WaitSource,
IntPtr AssociatedObjectID,
- ushort ClrInstanceID);
+ ushort ClrInstanceID) =>
+ Debug.Fail("This event is currently not expected to be raised by managed code in CoreCLR.");
[NonEvent]
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "NativeRuntimeEventSource_LogWaitHandleWaitStop")]
- private static partial void LogWaitHandleWaitStop(ushort ClrInstanceID);
+ private static void LogWaitHandleWaitStop(ushort ClrInstanceID) =>
+ Debug.Fail("This event is currently not expected to be raised by managed code in CoreCLR.");
+#pragma warning restore IDE0060
}
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Environment.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Environment.CoreCLR.cs
index e0a24a42ef3223..8bbd6e98ddaabb 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Environment.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Environment.CoreCLR.cs
@@ -5,7 +5,6 @@
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
-using System.Security;
using System.Threading;
namespace System
@@ -34,15 +33,12 @@ public static extern int ExitCode
set;
}
+ // Note: The CLR's Watson bucketization code looks at the caller of the FCALL method
+ // to assign blame for crashes. Don't mess with this, such as by making it call
+ // another managed helper method, unless you consult with some CLR Watson experts.
[DoesNotReturn]
- [DynamicSecurityMethod] // Methods containing StackCrawlMark local var has to be marked DynamicSecurityMethod
- public static void FailFast(string? message)
- {
- // Note: The CLR's Watson bucketization code looks at the our caller
- // to assign blame for crashes.
- StackCrawlMark mark = StackCrawlMark.LookForMyCaller;
- FailFast(ref mark, message, exception: null, errorMessage: null);
- }
+ [MethodImpl(MethodImplOptions.InternalCall)]
+ public static extern void FailFast(string? message);
// This overload of FailFast will allow you to specify the exception object
// whose bucket details *could* be used when undergoing the failfast process.
@@ -58,34 +54,12 @@ public static void FailFast(string? message)
// IP for bucketing. If the exception object is not preallocated, it will use the bucket
// details contained in the object (if any).
[DoesNotReturn]
- [DynamicSecurityMethod] // Methods containing StackCrawlMark local var has to be marked DynamicSecurityMethod
- public static void FailFast(string? message, Exception? exception)
- {
- // Note: The CLR's Watson bucketization code looks at the our caller
- // to assign blame for crashes.
- StackCrawlMark mark = StackCrawlMark.LookForMyCaller;
- FailFast(ref mark, message, exception, errorMessage: null);
- }
-
- [DoesNotReturn]
- [DynamicSecurityMethod] // Methods containing StackCrawlMark local var has to be marked DynamicSecurityMethod
- internal static void FailFast(string? message, Exception? exception, string? errorMessage)
- {
- // Note: The CLR's Watson bucketization code looks at the our caller
- // to assign blame for crashes.
- StackCrawlMark mark = StackCrawlMark.LookForMyCaller;
- FailFast(ref mark, message, exception, errorMessage);
- }
-
- [DoesNotReturn]
- private static void FailFast(ref StackCrawlMark mark, string? message, Exception? exception, string? errorMessage)
- {
- FailFast(new StackCrawlMarkHandle(ref mark), message, ObjectHandleOnStack.Create(ref exception), errorMessage);
- }
+ [MethodImpl(MethodImplOptions.InternalCall)]
+ public static extern void FailFast(string? message, Exception? exception);
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "Environment_FailFast", StringMarshalling = StringMarshalling.Utf16)]
[DoesNotReturn]
- private static partial void FailFast(StackCrawlMarkHandle mark, string? message, ObjectHandleOnStack exception, string? errorMessage);
+ [MethodImpl(MethodImplOptions.InternalCall)]
+ internal static extern void FailFast(string? message, Exception? exception, string? errorMessage);
private static unsafe string[] InitializeCommandLineArgs(char* exePath, int argc, char** argv) // invoked from VM
{
diff --git a/src/coreclr/System.Private.CoreLib/src/System/GC.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/GC.CoreCLR.cs
index 697788316ba031..590fd5b18cee09 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/GC.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/GC.CoreCLR.cs
@@ -865,9 +865,7 @@ public static unsafe IReadOnlyDictionary GetConfigurationVariabl
Configurations = new Dictionary()
};
-#pragma warning disable CS8500 // takes address of managed type
- _EnumerateConfigurationValues(&context, &ConfigCallback);
-#pragma warning restore CS8500
+ _EnumerateConfigurationValues(Unsafe.AsPointer(ref context), &ConfigCallback);
return context.Configurations!;
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Object.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Object.CoreCLR.cs
index 940d1622bad188..70cff629fc28e6 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Object.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Object.CoreCLR.cs
@@ -1,7 +1,6 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
-using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace System
@@ -20,9 +19,7 @@ public partial class Object
[Intrinsic]
protected internal unsafe object MemberwiseClone()
{
- object clone = this;
- RuntimeHelpers.AllocateUninitializedClone(ObjectHandleOnStack.Create(ref clone));
- Debug.Assert(clone != this);
+ object clone = RuntimeHelpers.AllocateUninitializedClone(this);
// copy contents of "this" to the clone
@@ -33,7 +30,7 @@ protected internal unsafe object MemberwiseClone()
if (RuntimeHelpers.GetMethodTable(clone)->ContainsGCPointers)
Buffer.BulkMoveWithWriteBarrier(ref dst, ref src, byteCount);
else
- SpanHelpers.Memmove(ref dst, ref src, byteCount);
+ Buffer.Memmove(ref dst, ref src, byteCount);
return clone;
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Reflection/Emit/DynamicILGenerator.cs b/src/coreclr/System.Private.CoreLib/src/System/Reflection/Emit/DynamicILGenerator.cs
index 327113c63f9a3c..2b695f1baf5b0f 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Reflection/Emit/DynamicILGenerator.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Reflection/Emit/DynamicILGenerator.cs
@@ -417,7 +417,7 @@ private int GetMemberRefToken(MethodInfo methodInfo, Type[]? optionalParameterTy
throw new ArgumentException(SR.Argument_MustBeRuntimeMethodInfo, nameof(methodInfo));
ReadOnlySpan paramInfo = methodInfo.GetParametersAsSpan();
- if (paramInfo.Length != 0)
+ if (paramInfo != null && paramInfo.Length != 0)
{
parameterTypes = new Type[paramInfo.Length];
requiredCustomModifiers = new Type[parameterTypes.Length][];
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeAssembly.cs b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeAssembly.cs
index 53f2690948df45..b5cff2f1e42ecd 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeAssembly.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Reflection/RuntimeAssembly.cs
@@ -645,29 +645,27 @@ public override Assembly GetSatelliteAssembly(CultureInfo culture, Version? vers
{
ArgumentNullException.ThrowIfNull(culture);
- return InternalGetSatelliteAssembly(this, culture, version, throwOnFileNotFound: true)!;
+ return InternalGetSatelliteAssembly(culture, version, throwOnFileNotFound: true)!;
}
[DynamicSecurityMethod] // Methods containing StackCrawlMark local var has to be marked DynamicSecurityMethod
- internal static Assembly? InternalGetSatelliteAssembly(Assembly assembly,
- CultureInfo culture,
+ internal Assembly? InternalGetSatelliteAssembly(CultureInfo culture,
Version? version,
bool throwOnFileNotFound)
{
var an = new AssemblyName();
- RuntimeAssembly runtimeAssembly = (RuntimeAssembly)assembly;
- an.SetPublicKey(runtimeAssembly.GetPublicKey());
- an.Flags = runtimeAssembly.GetFlags() | AssemblyNameFlags.PublicKey;
- an.Version = version ?? runtimeAssembly.GetVersion();
+ an.SetPublicKey(GetPublicKey());
+ an.Flags = GetFlags() | AssemblyNameFlags.PublicKey;
+ an.Version = version ?? GetVersion();
an.CultureInfo = culture;
- an.Name = runtimeAssembly.GetSimpleName() + ".resources";
+ an.Name = GetSimpleName() + ".resources";
// This stack crawl mark is never used because the requesting assembly is explicitly specified,
// so the value could be anything.
StackCrawlMark unused = default;
- RuntimeAssembly? retAssembly = InternalLoad(an, ref unused, requestingAssembly: runtimeAssembly, throwOnFileNotFound: throwOnFileNotFound);
+ RuntimeAssembly? retAssembly = InternalLoad(an, ref unused, requestingAssembly: this, throwOnFileNotFound: throwOnFileNotFound);
- if (retAssembly == runtimeAssembly)
+ if (retAssembly == this)
{
retAssembly = null;
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Resources/ManifestBasedResourceGroveler.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Resources/ManifestBasedResourceGroveler.CoreCLR.cs
new file mode 100644
index 00000000000000..05805072cd7cf3
--- /dev/null
+++ b/src/coreclr/System.Private.CoreLib/src/System/Resources/ManifestBasedResourceGroveler.CoreCLR.cs
@@ -0,0 +1,19 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Globalization;
+using System.Reflection;
+
+namespace System.Resources
+{
+ internal sealed partial class ManifestBasedResourceGroveler
+ {
+ // Internal version of GetSatelliteAssembly that avoids throwing FileNotFoundException
+ private static Assembly? InternalGetSatelliteAssembly(Assembly mainAssembly,
+ CultureInfo culture,
+ Version? version)
+ {
+ return ((RuntimeAssembly)mainAssembly).InternalGetSatelliteAssembly(culture, version, throwOnFileNotFound: false);
+ }
+ }
+}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Runtime/CompilerServices/RuntimeHelpers.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Runtime/CompilerServices/RuntimeHelpers.CoreCLR.cs
index 733e3a664bcc52..4e75d7db895ce4 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Runtime/CompilerServices/RuntimeHelpers.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Runtime/CompilerServices/RuntimeHelpers.CoreCLR.cs
@@ -139,32 +139,8 @@ public static unsafe void PrepareMethod(RuntimeMethodHandle method, RuntimeTypeH
[MethodImpl(MethodImplOptions.InternalCall)]
internal static extern int TryGetHashCode(object o);
- public static new unsafe bool Equals(object? o1, object? o2)
- {
- // Compare by ref for normal classes, by value for value types.
-
- if (ReferenceEquals(o1, o2))
- return true;
-
- if (o1 is null || o2 is null)
- return false;
-
- MethodTable* pMT = GetMethodTable(o1);
-
- // If it's not a value class, don't compare by value
- if (!pMT->IsValueType)
- return false;
-
- // Make sure they are the same type.
- if (pMT != GetMethodTable(o2))
- return false;
-
- // Compare the contents
- return ContentEquals(o1, o2);
- }
-
[MethodImpl(MethodImplOptions.InternalCall)]
- private static extern unsafe bool ContentEquals(object o1, object o2);
+ public static extern new bool Equals(object? o1, object? o2);
[Obsolete("OffsetToStringData has been deprecated. Use string.GetPinnableReference() instead.")]
public static int OffsetToStringData
@@ -218,8 +194,8 @@ public static object GetUninitializedObject(
return rt.GetUninitializedObject();
}
- [LibraryImport(QCall, EntryPoint = "ObjectNative_AllocateUninitializedClone")]
- internal static partial void AllocateUninitializedClone(ObjectHandleOnStack objHandle);
+ [MethodImpl(MethodImplOptions.InternalCall)]
+ internal static extern object AllocateUninitializedClone(object obj);
/// true if given type is reference type or value type that contains references
[Intrinsic]
@@ -486,13 +462,7 @@ internal unsafe struct MethodTable
// Additional conditional fields (see methodtable.h).
// m_pModule
-
- ///
- /// A pointer to auxiliary data that is cold for method table.
- ///
- [FieldOffset(AuxiliaryDataOffset)]
- public MethodTableAuxiliaryData* AuxiliaryData;
-
+ // m_pAuxiliaryData
// union {
// m_pEEClass (pointer to the EE class)
// m_pCanonMT (pointer to the canonical method table)
@@ -553,12 +523,6 @@ internal unsafe struct MethodTable
private const int ParentMethodTableOffset = 0x10 + DebugClassNamePtr;
-#if TARGET_64BIT
- private const int AuxiliaryDataOffset = 0x20 + DebugClassNamePtr;
-#else
- private const int AuxiliaryDataOffset = 0x18 + DebugClassNamePtr;
-#endif
-
#if TARGET_64BIT
private const int ElementTypeOffset = 0x30 + DebugClassNamePtr;
#else
@@ -646,28 +610,6 @@ public TypeHandle GetArrayElementTypeHandle()
public extern uint GetNumInstanceFieldBytes();
}
- // Subset of src\vm\methodtable.h
- [StructLayout(LayoutKind.Explicit)]
- internal unsafe struct MethodTableAuxiliaryData
- {
- [FieldOffset(0)]
- private uint Flags;
-
- private const uint enum_flag_CanCompareBitsOrUseFastGetHashCode = 0x0001; // Is any field type or sub field type overrode Equals or GetHashCode
- private const uint enum_flag_HasCheckedCanCompareBitsOrUseFastGetHashCode = 0x0002; // Whether we have checked the overridden Equals or GetHashCode
-
- public bool HasCheckedCanCompareBitsOrUseFastGetHashCode => (Flags & enum_flag_HasCheckedCanCompareBitsOrUseFastGetHashCode) != 0;
-
- public bool CanCompareBitsOrUseFastGetHashCode
- {
- get
- {
- Debug.Assert(HasCheckedCanCompareBitsOrUseFastGetHashCode);
- return (Flags & enum_flag_CanCompareBitsOrUseFastGetHashCode) != 0;
- }
- }
- }
-
///
/// A type handle, which can wrap either a pointer to a TypeDesc or to a .
///
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Runtime/ExceptionServices/AsmOffsets.cs b/src/coreclr/System.Private.CoreLib/src/System/Runtime/ExceptionServices/AsmOffsets.cs
index 7db188808e26a4..fb70ddcc703f13 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Runtime/ExceptionServices/AsmOffsets.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Runtime/ExceptionServices/AsmOffsets.cs
@@ -48,14 +48,14 @@ class AsmOffsets
#if TARGET_64BIT
public const int OFFSETOF__REGDISPLAY__m_pCurrentContext = 0x8;
- public const int SIZEOF__StackFrameIterator = 0x358;
- public const int OFFSETOF__StackFrameIterator__m_isRuntimeWrappedExceptions = 0x33A;
- public const int OFFSETOF__StackFrameIterator__m_AdjustedControlPC = 0x350;
+ public const int SIZEOF__StackFrameIterator = 0x370;
+ public const int OFFSETOF__StackFrameIterator__m_isRuntimeWrappedExceptions = 0x352;
+ public const int OFFSETOF__StackFrameIterator__m_AdjustedControlPC = 0x368;
#else // TARGET_64BIT
public const int OFFSETOF__REGDISPLAY__m_pCurrentContext = 0x4;
- public const int SIZEOF__StackFrameIterator = 0x2c8;
- public const int OFFSETOF__StackFrameIterator__m_isRuntimeWrappedExceptions = 0x2b6;
- public const int OFFSETOF__StackFrameIterator__m_AdjustedControlPC = 0x2c4;
+ public const int SIZEOF__StackFrameIterator = 0x2d8;
+ public const int OFFSETOF__StackFrameIterator__m_isRuntimeWrappedExceptions = 0x2c2;
+ public const int OFFSETOF__StackFrameIterator__m_AdjustedControlPC = 0x2d0;
#endif // TARGET_64BIT
#else // DEBUG
@@ -94,14 +94,14 @@ class AsmOffsets
#if TARGET_64BIT
public const int OFFSETOF__REGDISPLAY__m_pCurrentContext = 0x8;
- public const int SIZEOF__StackFrameIterator = 0x350;
- public const int OFFSETOF__StackFrameIterator__m_isRuntimeWrappedExceptions = 0x332;
- public const int OFFSETOF__StackFrameIterator__m_AdjustedControlPC = 0x348;
+ public const int SIZEOF__StackFrameIterator = 0x370;
+ public const int OFFSETOF__StackFrameIterator__m_isRuntimeWrappedExceptions = 0x34a;
+ public const int OFFSETOF__StackFrameIterator__m_AdjustedControlPC = 0x360;
#else // TARGET_64BIT
public const int OFFSETOF__REGDISPLAY__m_pCurrentContext = 0x4;
- public const int SIZEOF__StackFrameIterator = 0x2c0;
- public const int OFFSETOF__StackFrameIterator__m_isRuntimeWrappedExceptions = 0x2ae;
- public const int OFFSETOF__StackFrameIterator__m_AdjustedControlPC = 0x2bc;
+ public const int SIZEOF__StackFrameIterator = 0x2d0;
+ public const int OFFSETOF__StackFrameIterator__m_isRuntimeWrappedExceptions = 0x2ba;
+ public const int OFFSETOF__StackFrameIterator__m_AdjustedControlPC = 0x2c8;
#endif // TARGET_64BIT
#endif // DEBUG
@@ -155,7 +155,7 @@ class AsmOffsets
public const int OFFSETOF__ExInfo__m_kind = 0xd0;
public const int OFFSETOF__ExInfo__m_passNumber = 0xd1;
public const int OFFSETOF__ExInfo__m_idxCurClause = 0xd4;
- public const int OFFSETOF__ExInfo__m_frameIter = 0xd8;
+ public const int OFFSETOF__ExInfo__m_frameIter = 0xe0;
public const int OFFSETOF__ExInfo__m_notifyDebuggerSP = OFFSETOF__ExInfo__m_frameIter + SIZEOF__StackFrameIterator;
#else // TARGET_64BIT
public const int SIZEOF__EHEnum = 0x10;
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Runtime/ExceptionServices/InternalCalls.cs b/src/coreclr/System.Private.CoreLib/src/System/Runtime/ExceptionServices/InternalCalls.cs
index 228f58c0ea4d06..4ae608fc17d23d 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Runtime/ExceptionServices/InternalCalls.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Runtime/ExceptionServices/InternalCalls.cs
@@ -42,7 +42,7 @@ internal static unsafe partial bool RhpCallFilterFunclet(
[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "EHEnumInitFromStackFrameIterator")]
[return: MarshalAs(UnmanagedType.Bool)]
- internal static unsafe partial bool RhpEHEnumInitFromStackFrameIterator(ref StackFrameIterator pFrameIter, out EH.MethodRegionInfo pMethodRegionInfo, void* pEHEnum);
+ internal static unsafe partial bool RhpEHEnumInitFromStackFrameIterator(ref StackFrameIterator pFrameIter, byte** pMethodStartAddress, void* pEHEnum);
[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "EHEnumNext")]
[return: MarshalAs(UnmanagedType.Bool)]
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/Marshal.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/Marshal.CoreCLR.cs
index bbdccc6cd2eed4..c04665aa6c22f4 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/Marshal.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/Runtime/InteropServices/Marshal.CoreCLR.cs
@@ -266,7 +266,7 @@ public static unsafe void StructureToPtr(object structure, IntPtr ptr, bool fDel
}
else
{
- SpanHelpers.Memmove(ref *(byte*)ptr, ref structure.GetRawData(), size);
+ Buffer.Memmove(ref *(byte*)ptr, ref structure.GetRawData(), size);
}
}
@@ -291,7 +291,7 @@ private static unsafe void PtrToStructureHelper(IntPtr ptr, object structure, bo
}
else
{
- SpanHelpers.Memmove(ref structure.GetRawData(), ref *(byte*)ptr, size);
+ Buffer.Memmove(ref structure.GetRawData(), ref *(byte*)ptr, size);
}
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/Runtime/Versioning/CompatibilitySwitch.cs b/src/coreclr/System.Private.CoreLib/src/System/Runtime/Versioning/CompatibilitySwitch.cs
new file mode 100644
index 00000000000000..d90f81d48e986d
--- /dev/null
+++ b/src/coreclr/System.Private.CoreLib/src/System/Runtime/Versioning/CompatibilitySwitch.cs
@@ -0,0 +1,13 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Runtime.CompilerServices;
+
+namespace System.Runtime.Versioning
+{
+ internal static class CompatibilitySwitch
+ {
+ [MethodImpl(MethodImplOptions.InternalCall)]
+ internal static extern string? GetValueInternal(string compatibilitySwitchName);
+ }
+}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/RuntimeType.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/RuntimeType.CoreCLR.cs
index 00a8d78685d4d9..c74d76388b91a9 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/RuntimeType.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/RuntimeType.CoreCLR.cs
@@ -2757,12 +2757,7 @@ public override InterfaceMapping GetInterfaceMap([DynamicallyAccessedMembers(Dyn
MethodBase? rtTypeMethodBase = GetMethodBase(reflectedType, classRtMethodHandle);
// a class may not implement all the methods of an interface (abstract class) so null is a valid value
Debug.Assert(rtTypeMethodBase is null || rtTypeMethodBase is RuntimeMethodInfo);
- RuntimeMethodInfo? targetMethod = (RuntimeMethodInfo?)rtTypeMethodBase;
- // the TargetMethod provided to us by runtime internals may be a generic method instance,
- // potentially with invalid arguments. TargetMethods in the InterfaceMap should never be
- // instances, only definitions.
- im.TargetMethods[i] = (targetMethod is { IsGenericMethod: true, IsGenericMethodDefinition: false })
- ? targetMethod.GetGenericMethodDefinition() : targetMethod!;
+ im.TargetMethods[i] = (MethodInfo)rtTypeMethodBase!;
}
return im;
diff --git a/src/libraries/System.Private.CoreLib/src/System/Security/DynamicSecurityMethodAttribute.cs b/src/coreclr/System.Private.CoreLib/src/System/Security/DynamicSecurityMethodAttribute.cs
similarity index 100%
rename from src/libraries/System.Private.CoreLib/src/System/Security/DynamicSecurityMethodAttribute.cs
rename to src/coreclr/System.Private.CoreLib/src/System/Security/DynamicSecurityMethodAttribute.cs
diff --git a/src/coreclr/System.Private.CoreLib/src/System/String.CoreCLR.cs b/src/coreclr/System.Private.CoreLib/src/System/String.CoreCLR.cs
index d19cb01034a74e..f15ad03d82182b 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/String.CoreCLR.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/String.CoreCLR.cs
@@ -23,12 +23,13 @@ public static string Intern(string str)
}
[LibraryImport(RuntimeHelpers.QCall, EntryPoint = "String_IsInterned")]
+ [return: MarshalAs(UnmanagedType.Bool)]
private static partial void IsInterned(StringHandleOnStack src);
public static string? IsInterned(string str)
{
ArgumentNullException.ThrowIfNull(str);
- IsInterned(new StringHandleOnStack(ref str!));
+ Intern(new StringHandleOnStack(ref str!));
return str;
}
@@ -38,7 +39,7 @@ internal static unsafe void InternalCopy(string src, IntPtr dest, int len)
{
if (len != 0)
{
- SpanHelpers.Memmove(ref *(byte*)dest, ref Unsafe.As(ref src.GetRawStringData()), (nuint)len);
+ Buffer.Memmove(ref *(byte*)dest, ref Unsafe.As(ref src.GetRawStringData()), (nuint)len);
}
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/StubHelpers.cs b/src/coreclr/System.Private.CoreLib/src/System/StubHelpers.cs
index 81c0dd8e1afecd..9874eef6dc2292 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/StubHelpers.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/StubHelpers.cs
@@ -103,7 +103,7 @@ internal static unsafe IntPtr ConvertToNative(int flags, string strManaged, IntP
// + 1 for the null character from the user. + 1 for the null character we put in.
pbNativeBuffer = (byte*)Marshal.AllocCoTaskMem(nb + 2);
- SpanHelpers.Memmove(ref *pbNativeBuffer, ref MemoryMarshal.GetArrayDataReference(bytes), (nuint)nb);
+ Buffer.Memmove(ref *pbNativeBuffer, ref MemoryMarshal.GetArrayDataReference(bytes), (nuint)nb);
}
}
@@ -360,7 +360,7 @@ internal static unsafe IntPtr ConvertToNative(string strManaged, bool fBestFit,
Debug.Assert(nbytesused >= 0 && nbytesused < nbytes, "Insufficient buffer allocated in VBByValStrMarshaler.ConvertToNative");
- SpanHelpers.Memmove(ref *pNative, ref MemoryMarshal.GetArrayDataReference(bytes), (nuint)nbytesused);
+ Buffer.Memmove(ref *pNative, ref MemoryMarshal.GetArrayDataReference(bytes), (nuint)nbytesused);
pNative[nbytesused] = 0;
*pLength = nbytesused;
@@ -409,7 +409,7 @@ internal static unsafe IntPtr ConvertToNative(int flags, string strManaged)
IntPtr bstr = Marshal.AllocBSTRByteLen(length);
if (bytes != null)
{
- SpanHelpers.Memmove(ref *(byte*)bstr, ref MemoryMarshal.GetArrayDataReference(bytes), length);
+ Buffer.Memmove(ref *(byte*)bstr, ref MemoryMarshal.GetArrayDataReference(bytes), length);
}
return bstr;
@@ -1484,7 +1484,7 @@ internal static unsafe void FmtClassUpdateNativeInternal(object obj, byte* pNati
}
else
{
- SpanHelpers.Memmove(ref *pNative, ref obj.GetRawData(), size);
+ Buffer.Memmove(ref *pNative, ref obj.GetRawData(), size);
}
}
@@ -1503,7 +1503,7 @@ internal static unsafe void FmtClassUpdateCLRInternal(object obj, byte* pNative)
}
else
{
- SpanHelpers.Memmove(ref obj.GetRawData(), ref *pNative, size);
+ Buffer.Memmove(ref obj.GetRawData(), ref *pNative, size);
}
}
diff --git a/src/coreclr/System.Private.CoreLib/src/System/ValueType.cs b/src/coreclr/System.Private.CoreLib/src/System/ValueType.cs
index f4c3acb31adf88..cc13e37e083f01 100644
--- a/src/coreclr/System.Private.CoreLib/src/System/ValueType.cs
+++ b/src/coreclr/System.Private.CoreLib/src/System/ValueType.cs
@@ -10,17 +10,15 @@
**
===========================================================*/
-using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Reflection;
using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
namespace System
{
[Serializable]
[TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
- public abstract partial class ValueType
+ public abstract class ValueType
{
[UnconditionalSuppressMessage("ReflectionAnalysis", "IL2075:UnrecognizedReflectionPattern",
Justification = "Trimmed fields don't make a difference for equality")]
@@ -38,7 +36,7 @@ public override unsafe bool Equals([NotNullWhen(true)] object? obj)
// if there are no GC references in this object we can avoid reflection
// and do a fast memcmp
- if (CanCompareBitsOrUseFastGetHashCode(RuntimeHelpers.GetMethodTable(obj))) // MethodTable kept alive by access to object below
+ if (CanCompareBits(this))
{
return SpanHelpers.SequenceEqual(
ref RuntimeHelpers.GetRawData(this),
@@ -68,23 +66,8 @@ ref RuntimeHelpers.GetRawData(obj),
return true;
}
- // Return true if the valuetype does not contain pointer, is tightly packed,
- // does not have floating point number field and does not override Equals method.
- private static unsafe bool CanCompareBitsOrUseFastGetHashCode(MethodTable* pMT)
- {
- MethodTableAuxiliaryData* pAuxData = pMT->AuxiliaryData;
-
- if (pAuxData->HasCheckedCanCompareBitsOrUseFastGetHashCode)
- {
- return pAuxData->CanCompareBitsOrUseFastGetHashCode;
- }
-
- return CanCompareBitsOrUseFastGetHashCodeHelper(pMT);
- }
-
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "MethodTable_CanCompareBitsOrUseFastGetHashCode")]
- [return: MarshalAs(UnmanagedType.Bool)]
- private static unsafe partial bool CanCompareBitsOrUseFastGetHashCodeHelper(MethodTable* pMT);
+ [MethodImpl(MethodImplOptions.InternalCall)]
+ private static extern bool CanCompareBits(object obj);
/*=================================GetHashCode==================================
**Action: Our algorithm for returning the hashcode is a little bit complex. We look
@@ -96,74 +79,8 @@ private static unsafe bool CanCompareBitsOrUseFastGetHashCode(MethodTable* pMT)
**Arguments: None.
**Exceptions: None.
==============================================================================*/
- public override unsafe int GetHashCode()
- {
- // The default implementation of GetHashCode() for all value types.
- // Note that this implementation reveals the value of the fields.
- // So if the value type contains any sensitive information it should
- // implement its own GetHashCode().
-
- MethodTable* pMT = RuntimeHelpers.GetMethodTable(this);
- ref byte rawData = ref RuntimeHelpers.GetRawData(this);
- HashCode hashCode = default;
-
- // To get less colliding and more evenly distributed hash codes,
- // we munge the class index into the hashcode
- hashCode.Add((IntPtr)pMT);
-
- if (CanCompareBitsOrUseFastGetHashCode(pMT))
- {
- // this is a struct with no refs and no "strange" offsets
- uint size = pMT->GetNumInstanceFieldBytes();
- hashCode.AddBytes(MemoryMarshal.CreateReadOnlySpan(ref rawData, (int)size));
- }
- else
- {
- object thisRef = this;
- switch (GetHashCodeStrategy(pMT, ObjectHandleOnStack.Create(ref thisRef), out uint fieldOffset, out uint fieldSize, out MethodTable* fieldMT))
- {
- case ValueTypeHashCodeStrategy.ReferenceField:
- hashCode.Add(Unsafe.As(ref Unsafe.AddByteOffset(ref rawData, fieldOffset)).GetHashCode());
- break;
-
- case ValueTypeHashCodeStrategy.DoubleField:
- hashCode.Add(Unsafe.As(ref Unsafe.AddByteOffset(ref rawData, fieldOffset)).GetHashCode());
- break;
-
- case ValueTypeHashCodeStrategy.SingleField:
- hashCode.Add(Unsafe.As(ref Unsafe.AddByteOffset(ref rawData, fieldOffset)).GetHashCode());
- break;
-
- case ValueTypeHashCodeStrategy.FastGetHashCode:
- Debug.Assert(fieldSize != 0);
- hashCode.AddBytes(MemoryMarshal.CreateReadOnlySpan(ref Unsafe.AddByteOffset(ref rawData, fieldOffset), (int)fieldSize));
- break;
-
- case ValueTypeHashCodeStrategy.ValueTypeOverride:
- Debug.Assert(fieldMT != null);
- // Box the field to handle complicated cases like mutable method and shared generic
- hashCode.Add(RuntimeHelpers.Box(fieldMT, ref Unsafe.AddByteOffset(ref rawData, fieldOffset))?.GetHashCode() ?? 0);
- break;
- }
- }
-
- return hashCode.ToHashCode();
- }
-
- // Must match the definition in src\vm\comutilnative.cpp
- private enum ValueTypeHashCodeStrategy
- {
- None,
- ReferenceField,
- DoubleField,
- SingleField,
- FastGetHashCode,
- ValueTypeOverride,
- }
-
- [LibraryImport(RuntimeHelpers.QCall, EntryPoint = "ValueType_GetHashCodeStrategy")]
- private static unsafe partial ValueTypeHashCodeStrategy GetHashCodeStrategy(
- MethodTable* pMT, ObjectHandleOnStack objHandle, out uint fieldOffset, out uint fieldSize, out MethodTable* fieldMT);
+ [MethodImpl(MethodImplOptions.InternalCall)]
+ public extern override int GetHashCode();
public override string? ToString()
{
diff --git a/src/coreclr/classlibnative/bcltype/objectnative.cpp b/src/coreclr/classlibnative/bcltype/objectnative.cpp
index afbda5fad99167..4622955b44adf6 100644
--- a/src/coreclr/classlibnative/bcltype/objectnative.cpp
+++ b/src/coreclr/classlibnative/bcltype/objectnative.cpp
@@ -123,22 +123,48 @@ FCIMPL1(INT32, ObjectNative::TryGetHashCode, Object* obj) {
}
FCIMPLEND
-FCIMPL2(FC_BOOL_RET, ObjectNative::ContentEquals, Object *pThisRef, Object *pCompareRef)
+//
+// Compare by ref for normal classes, by value for value types.
+//
+// @todo: it would be nice to customize this method based on the
+// defining class rather than doing a runtime check whether it is
+// a value type.
+//
+
+FCIMPL2(FC_BOOL_RET, ObjectNative::Equals, Object *pThisRef, Object *pCompareRef)
{
- FCALL_CONTRACT;
+ CONTRACTL
+ {
+ FCALL_CHECK;
+ INJECT_FAULT(FCThrow(kOutOfMemoryException););
+ }
+ CONTRACTL_END;
+
+ if (pThisRef == pCompareRef)
+ FC_RETURN_BOOL(TRUE);
- // Should be ensured by caller
- _ASSERTE(pThisRef != NULL);
- _ASSERTE(pCompareRef != NULL);
- _ASSERTE(pThisRef->GetMethodTable() == pCompareRef->GetMethodTable());
+ // Since we are in FCALL, we must handle NULL specially.
+ if (pThisRef == NULL || pCompareRef == NULL)
+ FC_RETURN_BOOL(FALSE);
MethodTable *pThisMT = pThisRef->GetMethodTable();
- // Compare the contents
+ // If it's not a value class, don't compare by value
+ if (!pThisMT->IsValueType())
+ FC_RETURN_BOOL(FALSE);
+
+ // Make sure they are the same type.
+ if (pThisMT != pCompareRef->GetMethodTable())
+ FC_RETURN_BOOL(FALSE);
+
+ // Compare the contents (size - vtable - sync block index).
+ DWORD dwBaseSize = pThisMT->GetBaseSize();
+ if(pThisMT == g_pStringClass)
+ dwBaseSize -= sizeof(WCHAR);
BOOL ret = memcmp(
- pThisRef->GetData(),
- pCompareRef->GetData(),
- pThisMT->GetNumInstanceFieldBytes()) == 0;
+ (void *) (pThisRef+1),
+ (void *) (pCompareRef+1),
+ dwBaseSize - sizeof(Object) - sizeof(int)) == 0;
FC_GC_POLL_RET();
@@ -189,34 +215,36 @@ FCIMPL1(Object*, ObjectNative::GetClass, Object* pThis)
}
FCIMPLEND
-extern "C" void QCALLTYPE ObjectNative_AllocateUninitializedClone(QCall::ObjectHandleOnStack objHandle)
+FCIMPL1(Object*, ObjectNative::AllocateUninitializedClone, Object* pObjUNSAFE)
{
- QCALL_CONTRACT;
+ FCALL_CONTRACT;
- BEGIN_QCALL;
+ // Delegate error handling to managed side (it will throw NullReferenceException)
+ if (pObjUNSAFE == NULL)
+ return NULL;
- GCX_COOP();
+ OBJECTREF refClone = ObjectToOBJECTREF(pObjUNSAFE);
+
+ HELPER_METHOD_FRAME_BEGIN_RET_1(refClone);
- OBJECTREF refClone = objHandle.Get();
- _ASSERTE(refClone != NULL); // Should be handled at managed side
MethodTable* pMT = refClone->GetMethodTable();
-
+
// assert that String has overloaded the Clone() method
_ASSERTE(pMT != g_pStringClass);
-
- if (pMT->IsArray())
- {
- objHandle.Set(DupArrayForCloning((BASEARRAYREF)refClone));
- }
- else
- {
+
+ if (pMT->IsArray()) {
+ refClone = DupArrayForCloning((BASEARRAYREF)refClone);
+ } else {
// We don't need to call the because we know
// that it has been called....(It was called before this was created)
- objHandle.Set(AllocateObject(pMT));
+ refClone = AllocateObject(pMT);
}
- END_QCALL;
+ HELPER_METHOD_FRAME_END();
+
+ return OBJECTREFToObject(refClone);
}
+FCIMPLEND
extern "C" BOOL QCALLTYPE Monitor_Wait(QCall::ObjectHandleOnStack pThis, INT32 Timeout)
{
diff --git a/src/coreclr/classlibnative/bcltype/objectnative.h b/src/coreclr/classlibnative/bcltype/objectnative.h
index 418fd2561d7cc8..d8948922dd0b74 100644
--- a/src/coreclr/classlibnative/bcltype/objectnative.h
+++ b/src/coreclr/classlibnative/bcltype/objectnative.h
@@ -27,12 +27,12 @@ class ObjectNative
static FCDECL1(INT32, GetHashCode, Object* vThisRef);
static FCDECL1(INT32, TryGetHashCode, Object* vThisRef);
- static FCDECL2(FC_BOOL_RET, ContentEquals, Object *pThisRef, Object *pCompareRef);
+ static FCDECL2(FC_BOOL_RET, Equals, Object *pThisRef, Object *pCompareRef);
+ static FCDECL1(Object*, AllocateUninitializedClone, Object* pObjUNSAFE);
static FCDECL1(Object*, GetClass, Object* pThis);
static FCDECL1(FC_BOOL_RET, IsLockHeld, Object* pThisUNSAFE);
};
-extern "C" void QCALLTYPE ObjectNative_AllocateUninitializedClone(QCall::ObjectHandleOnStack objHandle);
extern "C" BOOL QCALLTYPE Monitor_Wait(QCall::ObjectHandleOnStack pThis, INT32 Timeout);
extern "C" void QCALLTYPE Monitor_Pulse(QCall::ObjectHandleOnStack pThis);
extern "C" void QCALLTYPE Monitor_PulseAll(QCall::ObjectHandleOnStack pThis);
diff --git a/src/coreclr/classlibnative/bcltype/system.cpp b/src/coreclr/classlibnative/bcltype/system.cpp
index 5d2f00cd849db5..ef02743b36696f 100644
--- a/src/coreclr/classlibnative/bcltype/system.cpp
+++ b/src/coreclr/classlibnative/bcltype/system.cpp
@@ -133,67 +133,124 @@ extern "C" INT32 QCALLTYPE Environment_GetProcessorCount()
return processorCount;
}
-struct FindFailFastCallerStruct {
- StackCrawlMark* pStackMark;
- UINT_PTR retAddress;
-};
-
-// This method is called by the GetMethod function and will crawl backward
-// up the stack for integer methods.
-static StackWalkAction FindFailFastCallerCallback(CrawlFrame* frame, VOID* data) {
+// FailFast is supported in BCL.small as internal to support failing fast in places where EEE used to be thrown.
+//
+// Static message buffer used by SystemNative::FailFast to avoid reliance on a
+// managed string object buffer. This buffer is not always used, see comments in
+// the method below.
+WCHAR g_szFailFastBuffer[256];
+WCHAR *g_pFailFastBuffer = g_szFailFastBuffer;
+
+#define FAIL_FAST_STATIC_BUFFER_LENGTH (sizeof(g_szFailFastBuffer) / sizeof(WCHAR))
+
+// This is the common code for FailFast processing that is wrapped by the two
+// FailFast FCalls below.
+void SystemNative::GenericFailFast(STRINGREF refMesgString, EXCEPTIONREF refExceptionForWatsonBucketing, UINT_PTR retAddress, UINT exitCode, STRINGREF refErrorSourceString)
+{
CONTRACTL
{
- NOTHROW;
- GC_NOTRIGGER;
- MODE_ANY;
- }
- CONTRACTL_END;
-
- FindFailFastCallerStruct* pFindCaller = (FindFailFastCallerStruct*) data;
-
- // The check here is between the address of a local variable
- // (the stack mark) and a pointer to the EIP for a frame
- // (which is actually the pointer to the return address to the
- // function from the previous frame). So we'll actually notice
- // which frame the stack mark was in one frame later. This is
- // fine since we only implement LookForMyCaller.
- _ASSERTE(*pFindCaller->pStackMark == LookForMyCaller);
- if (!frame->IsInCalleesFrames(pFindCaller->pStackMark))
- return SWA_CONTINUE;
-
- pFindCaller->retAddress = GetControlPC(frame->GetRegisterSet());
- return SWA_ABORT;
-}
+ THROWS;
+ GC_TRIGGERS;
+ MODE_COOPERATIVE;
+ }CONTRACTL_END;
-extern "C" void QCALLTYPE Environment_FailFast(QCall::StackCrawlMarkHandle mark, PCWSTR message, QCall::ObjectHandleOnStack exception, PCWSTR errorSource)
-{
- QCALL_CONTRACT;
+ struct
+ {
+ STRINGREF refMesgString;
+ EXCEPTIONREF refExceptionForWatsonBucketing;
+ STRINGREF refErrorSourceString;
+ } gc;
+ gc.refMesgString = refMesgString;
+ gc.refExceptionForWatsonBucketing = refExceptionForWatsonBucketing;
+ gc.refErrorSourceString = refErrorSourceString;
+
+ GCPROTECT_BEGIN(gc);
+
+ // Managed code injected FailFast maps onto the unmanaged version
+ // (EEPolicy::HandleFatalError) in the following manner: the exit code is
+ // always set to COR_E_FAILFAST and the address passed (usually a failing
+ // EIP) is in fact the address of a unicode message buffer (explaining the
+ // reason for the fault).
+ // The message string comes from a managed string object so we can't rely on
+ // the buffer remaining in place below our feet. But equally we don't want
+ // to inject failure points (by, for example, allocating a heap buffer or a
+ // pinning handle) when we have a much higher chance than usual of actually
+ // tripping those failure points and eradicating useful debugging info.
+ // We employ various strategies to deal with this:
+ // o If the message is small enough we copy it into a static buffer
+ // (g_szFailFastBuffer).
+ // o Otherwise we try to allocate a buffer of the required size on the
+ // heap. This buffer will be leaked.
+ // o If the allocation above fails we return to the static buffer and
+ // truncate the message.
+ //
+ // Another option would seem to be to implement a new frame type that
+ // protects object references as pinned, but that seems like overkill for
+ // just this problem.
+ WCHAR *pszMessageBuffer = NULL;
+ DWORD cchMessage = (gc.refMesgString == NULL) ? 0 : gc.refMesgString->GetStringLength();
+
+ WCHAR * errorSourceString = NULL;
+
+ if (gc.refErrorSourceString != NULL)
+ {
+ DWORD cchErrorSource = gc.refErrorSourceString->GetStringLength();
+ errorSourceString = new (nothrow) WCHAR[cchErrorSource + 1];
- BEGIN_QCALL;
+ if (errorSourceString != NULL)
+ {
+ memcpyNoGCRefs(errorSourceString, gc.refErrorSourceString->GetBuffer(), cchErrorSource * sizeof(WCHAR));
+ errorSourceString[cchErrorSource] = W('\0');
+ }
+ }
- GCX_COOP();
+ if (cchMessage < FAIL_FAST_STATIC_BUFFER_LENGTH)
+ {
+ // The static buffer can be used only once to avoid race condition with other threads
+ pszMessageBuffer = InterlockedExchangeT(&g_pFailFastBuffer, NULL);
+ }
- FindFailFastCallerStruct findCallerData;
- findCallerData.pStackMark = mark;
- findCallerData.retAddress = 0;
- GetThread()->StackWalkFrames(FindFailFastCallerCallback, &findCallerData, FUNCTIONSONLY | QUICKUNWIND);
+ if (pszMessageBuffer == NULL)
+ {
+ // We can fail here, but we can handle the fault.
+ CONTRACT_VIOLATION(FaultViolation);
+ pszMessageBuffer = new (nothrow) WCHAR[cchMessage + 1];
+ if (pszMessageBuffer == NULL)
+ {
+ // Truncate the message to what will fit in the static buffer.
+ cchMessage = FAIL_FAST_STATIC_BUFFER_LENGTH - 1;
+ pszMessageBuffer = InterlockedExchangeT(&g_pFailFastBuffer, NULL);
+ }
+ }
- if (message == NULL || message[0] == W('\0'))
+ const WCHAR *pszMessage;
+ if (pszMessageBuffer != NULL)
{
- WszOutputDebugString(W("CLR: Managed code called FailFast without specifying a reason.\r\n"));
+ if (cchMessage > 0)
+ memcpyNoGCRefs(pszMessageBuffer, gc.refMesgString->GetBuffer(), cchMessage * sizeof(WCHAR));
+ pszMessageBuffer[cchMessage] = W('\0');
+ pszMessage = pszMessageBuffer;
}
else
{
+ pszMessage = W("There is not enough memory to print the supplied FailFast message.");
+ cchMessage = (DWORD)u16_strlen(pszMessage);
+ }
+
+ if (cchMessage == 0) {
+ WszOutputDebugString(W("CLR: Managed code called FailFast without specifying a reason.\r\n"));
+ }
+ else {
WszOutputDebugString(W("CLR: Managed code called FailFast.\r\n"));
- WszOutputDebugString(message);
+ WszOutputDebugString(pszMessage);
WszOutputDebugString(W("\r\n"));
}
LPCWSTR argExceptionString = NULL;
StackSString msg;
- if (exception.Get() != NULL)
+ if (gc.refExceptionForWatsonBucketing != NULL)
{
- GetExceptionMessage(exception.Get(), msg);
+ GetExceptionMessage(gc.refExceptionForWatsonBucketing, msg);
argExceptionString = msg.GetUnicode();
}
@@ -206,11 +263,11 @@ extern "C" void QCALLTYPE Environment_FailFast(QCall::StackCrawlMarkHandle mark,
// skip this, if required.
if (IsWatsonEnabled())
{
- if ((exception.Get() == NULL) || !SetupWatsonBucketsForFailFast((EXCEPTIONREF)exception.Get()))
+ if ((gc.refExceptionForWatsonBucketing == NULL) || !SetupWatsonBucketsForFailFast(gc.refExceptionForWatsonBucketing))
{
PTR_EHWatsonBucketTracker pUEWatsonBucketTracker = pThread->GetExceptionState()->GetUEWatsonBucketTracker();
_ASSERTE(pUEWatsonBucketTracker != NULL);
- pUEWatsonBucketTracker->SaveIpForWatsonBucket(findCallerData.retAddress);
+ pUEWatsonBucketTracker->SaveIpForWatsonBucket(retAddress);
pUEWatsonBucketTracker->CaptureUnhandledInfoForWatson(TypeOfReportedError::FatalError, pThread, NULL);
if (pUEWatsonBucketTracker->RetrieveWatsonBuckets() == NULL)
{
@@ -222,13 +279,90 @@ extern "C" void QCALLTYPE Environment_FailFast(QCall::StackCrawlMarkHandle mark,
// stash the user-provided exception object. this will be used as
// the inner exception object to the FatalExecutionEngineException.
- if (exception.Get() != NULL)
- pThread->SetLastThrownObject(exception.Get());
+ if (gc.refExceptionForWatsonBucketing != NULL)
+ pThread->SetLastThrownObject(gc.refExceptionForWatsonBucketing);
- EEPolicy::HandleFatalError(COR_E_FAILFAST, findCallerData.retAddress, message, NULL, errorSource, argExceptionString);
+ EEPolicy::HandleFatalError(exitCode, retAddress, pszMessage, NULL, errorSourceString, argExceptionString);
- END_QCALL;
+ GCPROTECT_END();
+}
+
+// Note: Do not merge this FCALL method with any other FailFast overloads.
+// Watson uses the managed FailFast method with one String for crash dump bucketization.
+FCIMPL1(VOID, SystemNative::FailFast, StringObject* refMessageUNSAFE)
+{
+ FCALL_CONTRACT;
+
+ STRINGREF refMessage = (STRINGREF)refMessageUNSAFE;
+
+ HELPER_METHOD_FRAME_BEGIN_1(refMessage);
+
+ // The HelperMethodFrame knows how to get the return address.
+ UINT_PTR retaddr = HELPER_METHOD_FRAME_GET_RETURN_ADDRESS();
+
+ // Call the actual worker to perform failfast
+ GenericFailFast(refMessage, NULL, retaddr, COR_E_FAILFAST, NULL);
+
+ HELPER_METHOD_FRAME_END();
+}
+FCIMPLEND
+
+FCIMPL2(VOID, SystemNative::FailFastWithExitCode, StringObject* refMessageUNSAFE, UINT exitCode)
+{
+ FCALL_CONTRACT;
+
+ STRINGREF refMessage = (STRINGREF)refMessageUNSAFE;
+
+ HELPER_METHOD_FRAME_BEGIN_1(refMessage);
+
+ // The HelperMethodFrame knows how to get the return address.
+ UINT_PTR retaddr = HELPER_METHOD_FRAME_GET_RETURN_ADDRESS();
+
+ // Call the actual worker to perform failfast
+ GenericFailFast(refMessage, NULL, retaddr, exitCode, NULL);
+
+ HELPER_METHOD_FRAME_END();
}
+FCIMPLEND
+
+FCIMPL2(VOID, SystemNative::FailFastWithException, StringObject* refMessageUNSAFE, ExceptionObject* refExceptionUNSAFE)
+{
+ FCALL_CONTRACT;
+
+ STRINGREF refMessage = (STRINGREF)refMessageUNSAFE;
+ EXCEPTIONREF refException = (EXCEPTIONREF)refExceptionUNSAFE;
+
+ HELPER_METHOD_FRAME_BEGIN_2(refMessage, refException);
+
+ // The HelperMethodFrame knows how to get the return address.
+ UINT_PTR retaddr = HELPER_METHOD_FRAME_GET_RETURN_ADDRESS();
+
+ // Call the actual worker to perform failfast
+ GenericFailFast(refMessage, refException, retaddr, COR_E_FAILFAST, NULL);
+
+ HELPER_METHOD_FRAME_END();
+}
+FCIMPLEND
+
+FCIMPL3(VOID, SystemNative::FailFastWithExceptionAndSource, StringObject* refMessageUNSAFE, ExceptionObject* refExceptionUNSAFE, StringObject* errorSourceUNSAFE)
+{
+ FCALL_CONTRACT;
+
+ STRINGREF refMessage = (STRINGREF)refMessageUNSAFE;
+ EXCEPTIONREF refException = (EXCEPTIONREF)refExceptionUNSAFE;
+ STRINGREF errorSource = (STRINGREF)errorSourceUNSAFE;
+
+ HELPER_METHOD_FRAME_BEGIN_3(refMessage, refException, errorSource);
+
+ // The HelperMethodFrame knows how to get the return address.
+ UINT_PTR retaddr = HELPER_METHOD_FRAME_GET_RETURN_ADDRESS();
+
+ // Call the actual worker to perform failfast
+ GenericFailFast(refMessage, refException, retaddr, COR_E_FAILFAST, errorSource);
+
+ HELPER_METHOD_FRAME_END();
+}
+FCIMPLEND
FCIMPL0(FC_BOOL_RET, SystemNative::IsServerGC)
{
diff --git a/src/coreclr/classlibnative/bcltype/system.h b/src/coreclr/classlibnative/bcltype/system.h
index 9c5ab7ada84a4b..b4a773a847c398 100644
--- a/src/coreclr/classlibnative/bcltype/system.h
+++ b/src/coreclr/classlibnative/bcltype/system.h
@@ -43,16 +43,23 @@ class SystemNative
static FCDECL1(VOID,SetExitCode,INT32 exitcode);
static FCDECL0(INT32, GetExitCode);
+ static FCDECL1(VOID, FailFast, StringObject* refMessageUNSAFE);
+ static FCDECL2(VOID, FailFastWithExitCode, StringObject* refMessageUNSAFE, UINT exitCode);
+ static FCDECL2(VOID, FailFastWithException, StringObject* refMessageUNSAFE, ExceptionObject* refExceptionUNSAFE);
+ static FCDECL3(VOID, FailFastWithExceptionAndSource, StringObject* refMessageUNSAFE, ExceptionObject* refExceptionUNSAFE, StringObject* errorSourceUNSAFE);
+
static FCDECL0(FC_BOOL_RET, IsServerGC);
// Return a method info for the method were the exception was thrown
static FCDECL1(ReflectMethodObject*, GetMethodFromStackTrace, ArrayBase* pStackTraceUNSAFE);
+
+private:
+ // Common processing code for FailFast
+ static void GenericFailFast(STRINGREF refMesgString, EXCEPTIONREF refExceptionForWatsonBucketing, UINT_PTR retAddress, UINT exitCode, STRINGREF errorSource);
};
extern "C" void QCALLTYPE Environment_Exit(INT32 exitcode);
-extern "C" void QCALLTYPE Environment_FailFast(QCall::StackCrawlMarkHandle mark, PCWSTR message, QCall::ObjectHandleOnStack exception, PCWSTR errorSource);
-
// Returns the number of logical processors that can be used by managed code
extern "C" INT32 QCALLTYPE Environment_GetProcessorCount();
diff --git a/src/coreclr/clr.featuredefines.props b/src/coreclr/clr.featuredefines.props
index 7905f8a573d738..dccd5d0f150c14 100644
--- a/src/coreclr/clr.featuredefines.props
+++ b/src/coreclr/clr.featuredefines.props
@@ -1,26 +1,37 @@
true
+ true
true
true
true
true
+ true
true
true
+
true
true
+ true
+ true
+ true
true
true
true
+ true
true
true
true
+ true
+ true
+ true
+ true
@@ -34,16 +45,23 @@
$(DefineConstants);FEATURE_ARRAYSTUB_AS_IL
$(DefineConstants);FEATURE_MULTICASTSTUB_AS_IL
+ $(DefineConstants);FEATURE_INSTANTIATINGSTUB_AS_IL
+ $(DefineConstants);FEATURE_STUBS_AS_IL
+ $(DefineConstants);FEATURE_COLLECTIBLE_ALC
$(DefineConstants);FEATURE_COMWRAPPERS
$(DefineConstants);FEATURE_COMINTEROP
$(DefineConstants);FEATURE_COMINTEROP_APARTMENT_SUPPORT
$(DefineConstants);FEATURE_OBJCMARSHAL
$(DefineConstants);FEATURE_PERFTRACING
$(DefineConstants);FEATURE_EVENTSOURCE_XPLAT
+ $(DefineConstants);FEATURE_WIN32_REGISTRY
$(DefineConstants);FEATURE_TYPEEQUIVALENCE
+ $(DefineConstants);FEATURE_BASICFREEZE
+ $(DefineConstants);FEATURE_PORTABLE_SHUFFLE_THUNKS
$(DefineConstants);FEATURE_ICASTABLE
$(DefineConstants);FEATURE_EH_FUNCLETS
$(DefineConstants);PROFILING_SUPPORTED
+ $(DefineConstants);FEATURE_PROFAPI_ATTACH_DETACH
diff --git a/src/coreclr/clrdefinitions.cmake b/src/coreclr/clrdefinitions.cmake
index 2ffcfb00c0c05e..fb8d095b5606d7 100644
--- a/src/coreclr/clrdefinitions.cmake
+++ b/src/coreclr/clrdefinitions.cmake
@@ -203,6 +203,9 @@ if(CLR_CMAKE_TARGET_ARCH_AMD64 OR CLR_CMAKE_TARGET_ARCH_ARM64 OR CLR_CMAKE_TARGE
add_definitions(-DFEATURE_MANUALLY_MANAGED_CARD_BUNDLES)
endif(CLR_CMAKE_TARGET_ARCH_AMD64 OR CLR_CMAKE_TARGET_ARCH_ARM64 OR CLR_CMAKE_TARGET_ARCH_LOONGARCH64 OR CLR_CMAKE_TARGET_ARCH_RISCV64)
+if(NOT CLR_CMAKE_TARGET_UNIX)
+ add_definitions(-DFEATURE_WIN32_REGISTRY)
+endif(NOT CLR_CMAKE_TARGET_UNIX)
add_definitions(-D_SECURE_SCL=0)
add_definitions(-DUNICODE)
add_definitions(-D_UNICODE)
@@ -255,7 +258,7 @@ function(set_target_definitions_to_custom_os_and_arch)
if (TARGETDETAILS_OS STREQUAL "unix_anyos")
target_compile_definitions(${TARGETDETAILS_TARGET} PRIVATE TARGET_UNIX_ANYOS)
endif()
- elseif (TARGETDETAILS_OS MATCHES "^win")
+ elseif (TARGETDETAILS_OS STREQUAL "win")
target_compile_definitions(${TARGETDETAILS_TARGET} PRIVATE TARGET_WINDOWS)
endif((TARGETDETAILS_OS MATCHES "^unix"))
@@ -284,7 +287,7 @@ function(set_target_definitions_to_custom_os_and_arch)
target_compile_definitions(${TARGETDETAILS_TARGET} PRIVATE ARM_SOFTFP)
endif()
- if (NOT (TARGETDETAILS_ARCH STREQUAL "x86") OR (TARGETDETAILS_OS MATCHES "^unix") OR (TARGETDETAILS_OS MATCHES "win_aot"))
+ if (NOT (TARGETDETAILS_ARCH STREQUAL "x86") OR (TARGETDETAILS_OS MATCHES "^unix"))
target_compile_definitions(${TARGETDETAILS_TARGET} PRIVATE FEATURE_EH_FUNCLETS)
- endif (NOT (TARGETDETAILS_ARCH STREQUAL "x86") OR (TARGETDETAILS_OS MATCHES "^unix") OR (TARGETDETAILS_OS MATCHES "win_aot"))
+ endif (NOT (TARGETDETAILS_ARCH STREQUAL "x86") OR (TARGETDETAILS_OS MATCHES "^unix"))
endfunction()
diff --git a/src/coreclr/crosscomponents.cmake b/src/coreclr/crosscomponents.cmake
index b06b7060704892..11e923805a6ea0 100644
--- a/src/coreclr/crosscomponents.cmake
+++ b/src/coreclr/crosscomponents.cmake
@@ -25,13 +25,6 @@ if (CLR_CMAKE_HOST_OS STREQUAL CLR_CMAKE_TARGET_OS OR CLR_CMAKE_TARGET_IOS OR CL
DESTINATIONS .
COMPONENT crosscomponents
)
- if (CLR_CMAKE_TARGET_ARCH_I386)
- install_clr (TARGETS
- clrjit_win_aot_${ARCH_TARGET_NAME}_${ARCH_HOST_NAME}
- DESTINATIONS .
- COMPONENT crosscomponents
- )
- endif()
endif()
endif()
endif()
diff --git a/src/coreclr/crossgen-corelib.proj b/src/coreclr/crossgen-corelib.proj
index 1d0a6e2262ef21..ae2741ea9c9cac 100644
--- a/src/coreclr/crossgen-corelib.proj
+++ b/src/coreclr/crossgen-corelib.proj
@@ -23,6 +23,7 @@
true
false
+ false
false
true
diff --git a/src/coreclr/debug/daccess/dacdbiimpl.cpp b/src/coreclr/debug/daccess/dacdbiimpl.cpp
index b316a0769d3f67..bc1e6ad8475459 100644
--- a/src/coreclr/debug/daccess/dacdbiimpl.cpp
+++ b/src/coreclr/debug/daccess/dacdbiimpl.cpp
@@ -7455,13 +7455,13 @@ HRESULT DacDbiInterfaceImpl::GetILCodeVersionNodeData(VMPTR_ILCodeVersionNode vm
#ifdef FEATURE_REJIT
ILCodeVersion ilCode(vmILCodeVersionNode.GetDacPtr());
pData->m_state = ilCode.GetRejitState();
- pData->m_pbIL = PTR_TO_CORDB_ADDRESS(dac_cast(ilCode.GetIL()));
+ pData->m_pbIL = PTR_TO_CORDB_ADDRESS(dac_cast(ilCode.GetIL()));
pData->m_dwCodegenFlags = ilCode.GetJitFlags();
const InstrumentedILOffsetMapping* pMapping = ilCode.GetInstrumentedILMap();
if (pMapping)
{
pData->m_cInstrumentedMapEntries = (ULONG)pMapping->GetCount();
- pData->m_rgInstrumentedMapEntries = PTR_TO_CORDB_ADDRESS(dac_cast(pMapping->GetOffsets()));
+ pData->m_rgInstrumentedMapEntries = PTR_TO_CORDB_ADDRESS(dac_cast(pMapping->GetOffsets()));
}
else
{
diff --git a/src/coreclr/debug/daccess/request.cpp b/src/coreclr/debug/daccess/request.cpp
index 3993d8a6f52ee4..1e0912ea05cdde 100644
--- a/src/coreclr/debug/daccess/request.cpp
+++ b/src/coreclr/debug/daccess/request.cpp
@@ -2577,7 +2577,7 @@ ClrDataAccess::GetAssemblyData(CLRDATA_ADDRESS cdBaseDomainPtr, CLRDATA_ADDRESS
}
assemblyData->AssemblyPtr = HOST_CDADDR(pAssembly);
- assemblyData->ClassLoader = 0;
+ assemblyData->ClassLoader = HOST_CDADDR(pAssembly->GetLoader());
assemblyData->ParentDomain = HOST_CDADDR(AppDomain::GetCurrentDomain());
assemblyData->isDynamic = pAssembly->IsDynamic();
assemblyData->ModuleCount = 0;
@@ -3810,13 +3810,9 @@ ClrDataAccess::GetJumpThunkTarget(T_CONTEXT *ctx, CLRDATA_ADDRESS *targetIP, CLR
#ifdef TARGET_AMD64
SOSDacEnter();
- TADDR tempTargetIP, tempTargetMD;
- if (!GetAnyThunkTarget(ctx, &tempTargetIP, &tempTargetMD))
+ if (!GetAnyThunkTarget(ctx, targetIP, targetMD))
hr = E_FAIL;
- *targetIP = TO_CDADDR(tempTargetIP);
- *targetMD = TO_CDADDR(tempTargetMD);
-
SOSDacLeave();
return hr;
#else
diff --git a/src/coreclr/debug/di/rspriv.h b/src/coreclr/debug/di/rspriv.h
index 68080a65cb8a6f..ceadc7eedafe77 100644
--- a/src/coreclr/debug/di/rspriv.h
+++ b/src/coreclr/debug/di/rspriv.h
@@ -7325,8 +7325,7 @@ class CordbJITILFrame : public CordbBase, public ICorDebugILFrame, public ICorDe
GENERICS_TYPE_TOKEN exactGenericArgsToken,
DWORD dwExactGenericArgsTokenIndex,
bool fVarArgFnx,
- CordbReJitILCode * pReJitCode,
- bool fAdjustedIP);
+ CordbReJitILCode * pReJitCode);
HRESULT Init();
virtual ~CordbJITILFrame();
virtual void Neuter();
@@ -7437,7 +7436,6 @@ class CordbJITILFrame : public CordbBase, public ICorDebugILFrame, public ICorDe
CordbILCode* GetOriginalILCode();
CordbReJitILCode* GetReJitILCode();
- void AdjustIPAfterException();
private:
void RefreshCachedVarArgSigParserIfNeeded();
@@ -7505,7 +7503,6 @@ class CordbJITILFrame : public CordbBase, public ICorDebugILFrame, public ICorDe
// if this frame is instrumented with rejit, this will point to the instrumented IL code
RSSmartPtr m_pReJitCode;
- BOOL m_adjustedIP;
};
/* ------------------------------------------------------------------------- *
diff --git a/src/coreclr/debug/di/rsstackwalk.cpp b/src/coreclr/debug/di/rsstackwalk.cpp
index f2bf3777bb6bb3..751d18dcc17972 100644
--- a/src/coreclr/debug/di/rsstackwalk.cpp
+++ b/src/coreclr/debug/di/rsstackwalk.cpp
@@ -776,8 +776,7 @@ HRESULT CordbStackWalk::GetFrameWorker(ICorDebugFrame ** ppFrame)
frameData.v.exactGenericArgsToken,
frameData.v.dwExactGenericArgsTokenIndex,
!!frameData.v.fVarArgs,
- pReJitCode,
- pJITFuncData->justAfterILThrow));
+ pReJitCode));
// Initialize the frame. This is a nop if the method is not a vararg method.
hr = pJITILFrame->Init();
diff --git a/src/coreclr/debug/di/rsthread.cpp b/src/coreclr/debug/di/rsthread.cpp
index 3c5024fc80fab4..7b969ee65d3d55 100644
--- a/src/coreclr/debug/di/rsthread.cpp
+++ b/src/coreclr/debug/di/rsthread.cpp
@@ -7396,8 +7396,7 @@ CordbJITILFrame::CordbJITILFrame(CordbNativeFrame * pNativeFrame,
GENERICS_TYPE_TOKEN exactGenericArgsToken,
DWORD dwExactGenericArgsTokenIndex,
bool fVarArgFnx,
- CordbReJitILCode * pRejitCode,
- bool fAdjustedIP)
+ CordbReJitILCode * pRejitCode)
: CordbBase(pNativeFrame->GetProcess(), 0, enumCordbJITILFrame),
m_nativeFrame(pNativeFrame),
m_ilCode(pCode),
@@ -7412,8 +7411,7 @@ CordbJITILFrame::CordbJITILFrame(CordbNativeFrame * pNativeFrame,
m_genericArgsLoaded(false),
m_frameParamsToken(exactGenericArgsToken),
m_dwFrameParamsTokenIndex(dwExactGenericArgsTokenIndex),
- m_pReJitCode(pRejitCode),
- m_adjustedIP(fAdjustedIP)
+ m_pReJitCode(pRejitCode)
{
// We'll initialize the SigParser in CordbJITILFrame::Init().
m_sigParserCached = SigParser(NULL, 0);
@@ -9029,21 +9027,6 @@ CordbReJitILCode* CordbJITILFrame::GetReJitILCode()
return m_pReJitCode;
}
-void CordbJITILFrame::AdjustIPAfterException()
-{
- CordbNativeFrame* nativeFrameToAdjustIP = m_nativeFrame;
- if (!m_adjustedIP)
- {
- DWORD nativeOffsetToMap = (DWORD)nativeFrameToAdjustIP->m_ip - STACKWALK_CONTROLPC_ADJUST_OFFSET;
- CorDebugMappingResult mappingType;
- ULONG uILOffset = nativeFrameToAdjustIP->m_nativeCode->GetSequencePoints()->MapNativeOffsetToIL(
- nativeOffsetToMap,
- &mappingType);
- m_ip= uILOffset;
- m_adjustedIP = true;
- }
-}
-
/* ------------------------------------------------------------------------- *
* Eval class
* ------------------------------------------------------------------------- */
diff --git a/src/coreclr/debug/di/shimpriv.h b/src/coreclr/debug/di/shimpriv.h
index ff0f16436a1f2c..1ce2f6857d4808 100644
--- a/src/coreclr/debug/di/shimpriv.h
+++ b/src/coreclr/debug/di/shimpriv.h
@@ -780,8 +780,6 @@ class ShimStackWalk
// Indicate whether we are processing a converted frame.
bool m_fHasConvertedFrame;
-
- bool m_fHasException;
};
// A ShimStackWalk is deleted when a process is continued, or when the stack is changed in any way
diff --git a/src/coreclr/debug/di/shimstackwalk.cpp b/src/coreclr/debug/di/shimstackwalk.cpp
index 46213d4ca36491..c47620c7bc0904 100644
--- a/src/coreclr/debug/di/shimstackwalk.cpp
+++ b/src/coreclr/debug/di/shimstackwalk.cpp
@@ -312,7 +312,6 @@ void ShimStackWalk::Populate()
// because of the leaf STUBFRAME_EXCEPTION.
chainInfo.CancelUMChain();
swInfo.m_fSkipChain = true;
- swInfo.m_fHasException = true;
}
}
@@ -989,20 +988,6 @@ CorDebugInternalFrameType ShimStackWalk::GetInternalFrameType(ICorDebugInternalF
void ShimStackWalk::AppendFrame(ICorDebugFrame * pFrame, StackWalkInfo * pStackWalkInfo)
{
- // We've detected we're in a stackwalk where we have an exception and no further managed frames
- // are on top of this frame. To ensure our IP points to the user line that threw the exception,
- // we ask the frame to adjust the IP to the call instruction as currently it points to the instruction after it.
- if (pStackWalkInfo->m_fHasException && pStackWalkInfo->m_cFrame == 0)
- {
- RSExtSmartPtr pNFrame3;
- HRESULT hr = pFrame->QueryInterface(IID_ICorDebugILFrame, reinterpret_cast(&pNFrame3));
- if (pNFrame3 != NULL)
- {
- CordbJITILFrame* JITILFrameToAdjustIP = (static_cast(pNFrame3.GetValue()));
- JITILFrameToAdjustIP->AdjustIPAfterException();
- pStackWalkInfo->m_fHasException = false;
- }
- }
// grow the
ICorDebugFrame ** ppFrame = m_stackFrames.AppendThrowing();
@@ -1484,8 +1469,7 @@ ShimStackWalk::StackWalkInfo::StackWalkInfo()
m_fProcessingInternalFrame(false),
m_fSkipChain(false),
m_fLeafFrame(true),
- m_fHasConvertedFrame(false),
- m_fHasException(false)
+ m_fHasConvertedFrame(false)
{
m_pChildFrame.Assign(NULL);
m_pConvertedInternalFrame2.Assign(NULL);
diff --git a/src/coreclr/debug/ee/controller.h b/src/coreclr/debug/ee/controller.h
index b838e11c0f85a3..a2d8dc2e2602f3 100644
--- a/src/coreclr/debug/ee/controller.h
+++ b/src/coreclr/debug/ee/controller.h
@@ -827,7 +827,7 @@ class DebuggerPatchTable : private CHashTableAndData
DebuggerControllerPatch * GetPatch(PTR_CORDB_ADDRESS_TYPE address)
{
SUPPORTS_DAC;
- ARM_ONLY(_ASSERTE(dac_cast(address) & THUMB_CODE));
+ ARM_ONLY(_ASSERTE(dac_cast(address) & THUMB_CODE));
DebuggerControllerPatch * pPatch =
dac_cast(Find(HashAddress(address), (SIZE_T)(dac_cast(address))));
diff --git a/src/coreclr/debug/ee/debugger.h b/src/coreclr/debug/ee/debugger.h
index 2b8573e31b3656..ac2a3218f73569 100644
--- a/src/coreclr/debug/ee/debugger.h
+++ b/src/coreclr/debug/ee/debugger.h
@@ -3892,6 +3892,8 @@ HANDLE OpenWin32EventOrThrow(
// Returns true if the specified IL offset has a special meaning (eg. prolog, etc.)
bool DbgIsSpecialILOffset(DWORD offset);
+#if !defined(TARGET_X86)
void FixupDispatcherContext(T_DISPATCHER_CONTEXT* pDispatcherContext, T_CONTEXT* pContext, PEXCEPTION_ROUTINE pUnwindPersonalityRoutine = NULL);
+#endif
#endif /* DEBUGGER_H_ */
diff --git a/src/coreclr/debug/inc/arm64/primitives.h b/src/coreclr/debug/inc/arm64/primitives.h
index 5f8b5262d993e4..05c03c7b3094f8 100644
--- a/src/coreclr/debug/inc/arm64/primitives.h
+++ b/src/coreclr/debug/inc/arm64/primitives.h
@@ -153,9 +153,9 @@ inline void CORDbgSetInstruction(CORDB_ADDRESS_TYPE* address,
#if !defined(DBI_COMPILE) && !defined(DACCESS_COMPILE) && defined(HOST_OSX)
ExecutableWriterHolder instructionWriterHolder((LPVOID)address, sizeof(PRD_TYPE));
- TADDR ptraddr = dac_cast(instructionWriterHolder.GetRW());
+ ULONGLONG ptraddr = dac_cast(instructionWriterHolder.GetRW());
#else // !DBI_COMPILE && !DACCESS_COMPILE && HOST_OSX
- TADDR ptraddr = dac_cast(address);
+ ULONGLONG ptraddr = dac_cast(address);
#endif // !DBI_COMPILE && !DACCESS_COMPILE && HOST_OSX
*(PRD_TYPE *)ptraddr = instruction;
FlushInstructionCache(GetCurrentProcess(),
@@ -167,7 +167,7 @@ inline PRD_TYPE CORDbgGetInstruction(UNALIGNED CORDB_ADDRESS_TYPE* address)
{
LIMITED_METHOD_CONTRACT;
- TADDR ptraddr = dac_cast(address);
+ ULONGLONG ptraddr = dac_cast(address);
return *(PRD_TYPE *)ptraddr;
}
diff --git a/src/coreclr/debug/inc/loongarch64/primitives.h b/src/coreclr/debug/inc/loongarch64/primitives.h
index b30e7dcdd2ea91..97e4fb9541a2ab 100644
--- a/src/coreclr/debug/inc/loongarch64/primitives.h
+++ b/src/coreclr/debug/inc/loongarch64/primitives.h
@@ -135,7 +135,7 @@ inline void CORDbgSetInstruction(CORDB_ADDRESS_TYPE* address,
// In a DAC build, this function assumes the input is an host address.
LIMITED_METHOD_DAC_CONTRACT;
- TADDR ptraddr = dac_cast(address);
+ ULONGLONG ptraddr = dac_cast(address);
*(PRD_TYPE *)ptraddr = instruction;
FlushInstructionCache(GetCurrentProcess(),
address,
@@ -146,7 +146,7 @@ inline PRD_TYPE CORDbgGetInstruction(UNALIGNED CORDB_ADDRESS_TYPE* address)
{
LIMITED_METHOD_CONTRACT;
- TADDR ptraddr = dac_cast(address);
+ ULONGLONG ptraddr = dac_cast(address);
return *(PRD_TYPE *)ptraddr;
}
diff --git a/src/coreclr/debug/inc/riscv64/primitives.h b/src/coreclr/debug/inc/riscv64/primitives.h
index 17ace22981c77d..066397fcda7146 100644
--- a/src/coreclr/debug/inc/riscv64/primitives.h
+++ b/src/coreclr/debug/inc/riscv64/primitives.h
@@ -137,7 +137,7 @@ inline void CORDbgSetInstruction(CORDB_ADDRESS_TYPE* address,
// In a DAC build, this function assumes the input is an host address.
LIMITED_METHOD_DAC_CONTRACT;
- TADDR ptraddr = dac_cast(address);
+ ULONGLONG ptraddr = dac_cast(address);
*(PRD_TYPE *)ptraddr = instruction;
FlushInstructionCache(GetCurrentProcess(),
address,
@@ -148,7 +148,7 @@ inline PRD_TYPE CORDbgGetInstruction(UNALIGNED CORDB_ADDRESS_TYPE* address)
{
LIMITED_METHOD_CONTRACT;
- TADDR ptraddr = dac_cast(address);
+ ULONGLONG ptraddr = dac_cast(address);
return *(PRD_TYPE *)ptraddr;
}
diff --git a/src/coreclr/gc/env/common.h b/src/coreclr/gc/env/common.h
index a3f6539aa3a491..78562ef0438b40 100644
--- a/src/coreclr/gc/env/common.h
+++ b/src/coreclr/gc/env/common.h
@@ -22,7 +22,6 @@
#include
#include
#include
-#include
#include
diff --git a/src/coreclr/gc/gc.cpp b/src/coreclr/gc/gc.cpp
index 4d281b16251a1e..0471326c0af5f7 100644
--- a/src/coreclr/gc/gc.cpp
+++ b/src/coreclr/gc/gc.cpp
@@ -2367,7 +2367,6 @@ int gc_heap::conserve_mem_setting = 0;
bool gc_heap::spin_count_unit_config_p = false;
uint64_t gc_heap::suspended_start_time = 0;
-uint64_t gc_heap::change_heap_count_time = 0;
uint64_t gc_heap::end_gc_time = 0;
uint64_t gc_heap::total_suspended_time = 0;
uint64_t gc_heap::process_start_time = 0;
@@ -22016,7 +22015,7 @@ void gc_heap::update_end_gc_time_per_heap()
if (heap_number == 0)
{
- dprintf (3, ("prev gen%d GC end time: prev start %I64d + prev gc elapsed %Id = %I64d",
+ dprintf (6666, ("prev gen%d GC end time: prev start %I64d + prev gc elapsed %Id = %I64d",
gen_number, dd_previous_time_clock (dd), dd_gc_elapsed_time (dd), (dd_previous_time_clock (dd) + dd_gc_elapsed_time (dd))));
}
@@ -22024,53 +22023,45 @@ void gc_heap::update_end_gc_time_per_heap()
if (heap_number == 0)
{
- dprintf (3, ("updated NGC%d %Id elapsed time to %I64d - %I64d = %I64d", gen_number, dd_gc_clock (dd), end_gc_time, dd_time_clock (dd), dd_gc_elapsed_time (dd)));
+ dprintf (6666, ("updated NGC%d %Id elapsed time to %I64d - %I64d = %I64d", gen_number, dd_gc_clock (dd), end_gc_time, dd_time_clock (dd), dd_gc_elapsed_time (dd)));
}
}
#ifdef DYNAMIC_HEAP_COUNT
if ((heap_number == 0) && (dynamic_adaptation_mode == dynamic_adaptation_to_application_sizes))
{
- if (settings.gc_index > 1)
- {
- dynamic_heap_count_data_t::sample& sample = dynamic_heap_count_data.samples[dynamic_heap_count_data.sample_index];
- sample.elapsed_between_gcs = end_gc_time - last_suspended_end_time;
- sample.gc_pause_time = dd_gc_elapsed_time (dynamic_data_of (0));
- sample.msl_wait_time = get_msl_wait_time ();
- // could cache this - we will get it again soon in do_post_gc
- sample.gc_survived_size = get_total_promoted ();
+ dynamic_heap_count_data_t::sample& sample = dynamic_heap_count_data.samples[dynamic_heap_count_data.sample_index];
+ sample.elapsed_between_gcs = end_gc_time - last_suspended_end_time;
+ sample.gc_pause_time = dd_gc_elapsed_time (dynamic_data_of (0));
+ sample.msl_wait_time = get_msl_wait_time();
- dprintf (6666, ("sample#%d: this GC end %I64d - last sus end %I64d = %I64d, this GC pause %I64d, msl wait %I64d",
- dynamic_heap_count_data.sample_index, end_gc_time, last_suspended_end_time, sample.elapsed_between_gcs, sample.gc_pause_time, sample.msl_wait_time));
+ dprintf (6666, ("sample#%d: this GC end %I64d - last sus end %I64d = %I64d, this GC pause %I64d, msl wait %I64d",
+ dynamic_heap_count_data.sample_index, end_gc_time, last_suspended_end_time, sample.elapsed_between_gcs, sample.gc_pause_time, sample.msl_wait_time));
- GCEventFireHeapCountSample_V1 (
- (uint64_t)VolatileLoadWithoutBarrier (&settings.gc_index),
- sample.elapsed_between_gcs,
- sample.gc_pause_time,
- sample.msl_wait_time);
+ last_suspended_end_time = end_gc_time;
- dynamic_heap_count_data.sample_index = (dynamic_heap_count_data.sample_index + 1) % dynamic_heap_count_data_t::sample_size;
- (dynamic_heap_count_data.current_samples_count)++;
+ GCEventFireHeapCountSample_V1 (
+ (uint64_t)VolatileLoadWithoutBarrier (&settings.gc_index),
+ sample.elapsed_between_gcs,
+ sample.gc_pause_time,
+ sample.msl_wait_time);
- if (settings.condemned_generation == max_generation)
- {
- gc_index_full_gc_end = dd_gc_clock (dynamic_data_of (0));
- size_t elapsed_between_gen2_gcs = end_gc_time - prev_gen2_end_time;
- size_t gen2_elapsed_time = sample.gc_pause_time;
- dynamic_heap_count_data_t::gen2_sample& g2_sample = dynamic_heap_count_data.gen2_samples[dynamic_heap_count_data.gen2_sample_index];
- g2_sample.gc_index = VolatileLoadWithoutBarrier (&(settings.gc_index));
- g2_sample.gc_percent = (float)gen2_elapsed_time * 100.0f / elapsed_between_gen2_gcs;
- (dynamic_heap_count_data.current_gen2_samples_count)++;
+ dynamic_heap_count_data.sample_index = (dynamic_heap_count_data.sample_index + 1) % dynamic_heap_count_data_t::sample_size;
- dprintf (6666, ("gen2 sample#%d: this GC end %I64d - last gen2 end %I64d = %I64d, GC elapsed %I64d, percent %.3f",
- dynamic_heap_count_data.gen2_sample_index, end_gc_time, prev_gen2_end_time, elapsed_between_gen2_gcs, gen2_elapsed_time, g2_sample.gc_percent));
- dynamic_heap_count_data.gen2_sample_index = (dynamic_heap_count_data.gen2_sample_index + 1) % dynamic_heap_count_data_t::sample_size;
- }
+ if (settings.condemned_generation == max_generation)
+ {
+ gc_index_full_gc_end = dd_gc_clock (dynamic_data_of (0));
+ size_t elapsed_between_gen2_gcs = end_gc_time - prev_gen2_end_time;
+ size_t gen2_elapsed_time = sample.gc_pause_time;
+ dynamic_heap_count_data.gen2_gc_percents[dynamic_heap_count_data.gen2_sample_index] = (float)gen2_elapsed_time * 100.0f / elapsed_between_gen2_gcs;
- calculate_new_heap_count ();
+ dprintf (6666, ("gen2 sample#%d: this GC end %I64d - last gen2 end %I64d = %I64d, GC elapsed %I64d, percent %.3f",
+ dynamic_heap_count_data.gen2_sample_index, end_gc_time, prev_gen2_end_time, elapsed_between_gen2_gcs,
+ gen2_elapsed_time, dynamic_heap_count_data.gen2_gc_percents[dynamic_heap_count_data.gen2_sample_index]));
+ dynamic_heap_count_data.gen2_sample_index = (dynamic_heap_count_data.gen2_sample_index + 1) % dynamic_heap_count_data_t::sample_size;
}
- last_suspended_end_time = end_gc_time;
+ calculate_new_heap_count ();
}
#endif //DYNAMIC_HEAP_COUNT
}
@@ -22237,16 +22228,11 @@ void gc_heap::gc1()
dprintf (6666, ("updating BGC %Id elapsed time to %I64d - %I64d = %I64d", dd_gc_clock (dd), end_gc_time, dd_time_clock (dd), dd_gc_elapsed_time (dd)));
float bgc_percent = (float)dd_gc_elapsed_time (dd) * 100.0f / (float)time_since_last_gen2;
- dynamic_heap_count_data_t::gen2_sample& g2_sample = dynamic_heap_count_data.gen2_samples[dynamic_heap_count_data.gen2_sample_index];
- g2_sample.gc_index = VolatileLoadWithoutBarrier (&(settings.gc_index));
- g2_sample.gc_percent = bgc_percent;
+ dynamic_heap_count_data.gen2_gc_percents[dynamic_heap_count_data.gen2_sample_index] = bgc_percent;
dprintf (6666, ("gen2 sample %d elapsed %Id * 100 / time inbetween gen2 %Id = %.3f",
dynamic_heap_count_data.gen2_sample_index, dd_gc_elapsed_time (dd), time_since_last_gen2, bgc_percent));
dynamic_heap_count_data.gen2_sample_index = (dynamic_heap_count_data.gen2_sample_index + 1) % dynamic_heap_count_data_t::sample_size;
- (dynamic_heap_count_data.current_gen2_samples_count)++;
gc_index_full_gc_end = dd_gc_clock (dynamic_data_of (0));
-
- calculate_new_heap_count ();
}
#endif //DYNAMIC_HEAP_COUNT
@@ -25089,6 +25075,7 @@ void gc_heap::recommission_heap()
// copy some fields from heap0
+
// this is copied to dd_previous_time_clock at the start of GC
dd_time_clock (dd) = dd_time_clock (heap0_dd);
@@ -25165,90 +25152,37 @@ float median_of_3 (float a, float b, float c)
return b;
}
-float log_with_base (float x, float base)
+size_t gc_heap::get_num_completed_gcs ()
{
- assert (x > base);
-
- return (float)(log(x) / log(base));
-}
-
-float mean (float* arr, int size)
-{
- float sum = 0.0;
-
- for (int i = 0; i < size; i++)
- {
- sum += arr[i];
- }
- return (sum / size);
-}
-
-// Change it to a desired number if you want to print.
-int max_times_to_print_tcp = 0;
-
-// Return the slope, and the average values in the avg arg.
-float slope (float* y, int n, float* avg)
-{
- assert (n > 0);
-
- if (n == 1)
- {
- dprintf (6666, ("only 1 tcp: %.3f, no slope", y[0]));
- *avg = y[0];
- return 0.0;
- }
-
- int sum_x = 0;
-
- for (int i = 0; i < n; i++)
- {
- sum_x += i;
-
- if (max_times_to_print_tcp >= 0)
- {
- dprintf (6666, ("%.3f, ", y[i]));
- }
- }
-
- float avg_x = (float)sum_x / n;
- float avg_y = mean (y, n);
- *avg = avg_y;
-
- float numerator = 0.0;
- float denominator = 0.0;
-
- for (int i = 0; i < n; ++i)
+ size_t num_completed_gcs = settings.gc_index;
+#ifdef BACKGROUND_GC
+ if (g_heaps[0]->is_bgc_in_progress ())
{
- numerator += ((float)i - avg_x) * (y[i] - avg_y);
- denominator += ((float)i - avg_x) * (i - avg_x);
+ num_completed_gcs--;
+ dprintf (6666, ("BGC in prog, completed GCs -> %Id", num_completed_gcs));
}
+#endif //BACKGROUND_GC
- max_times_to_print_tcp--;
-
- return (numerator / denominator);
+ return num_completed_gcs;
}
int gc_heap::calculate_new_heap_count ()
{
assert (dynamic_adaptation_mode == dynamic_adaptation_to_application_sizes);
- dprintf (6666, ("current num of samples %Id (g2: %Id) prev processed %Id (g2: %Id), last full GC happened at index %Id",
- dynamic_heap_count_data.current_samples_count, dynamic_heap_count_data.current_gen2_samples_count,
- dynamic_heap_count_data.processed_samples_count, dynamic_heap_count_data.processed_gen2_samples_count, gc_index_full_gc_end));
+ size_t num_completed_gcs = get_num_completed_gcs ();
+
+ dprintf (6666, ("current GC %Id(completed: %Id), prev completed GCs %Id, last full GC happened at index %Id",
+ VolatileLoadWithoutBarrier (&settings.gc_index), num_completed_gcs, dynamic_heap_count_data.prev_num_completed_gcs, gc_index_full_gc_end));
- if ((dynamic_heap_count_data.current_samples_count < (dynamic_heap_count_data.processed_samples_count + dynamic_heap_count_data_t::sample_size)) &&
- (dynamic_heap_count_data.current_gen2_samples_count < (dynamic_heap_count_data.processed_gen2_samples_count + dynamic_heap_count_data_t::sample_size)))
+ if (num_completed_gcs < (dynamic_heap_count_data.prev_num_completed_gcs + dynamic_heap_count_data_t::sample_size))
{
dprintf (6666, ("not enough GCs, skipping"));
return n_heaps;
}
- bool process_eph_samples_p = (dynamic_heap_count_data.current_samples_count >= (dynamic_heap_count_data.processed_samples_count + dynamic_heap_count_data_t::sample_size));
- bool process_gen2_samples_p = (dynamic_heap_count_data.current_gen2_samples_count >= (dynamic_heap_count_data.processed_gen2_samples_count + dynamic_heap_count_data_t::sample_size));
-
- size_t current_gc_index = VolatileLoadWithoutBarrier (&settings.gc_index);
float median_gen2_tcp_percent = 0.0f;
- if (dynamic_heap_count_data.current_gen2_samples_count >= (dynamic_heap_count_data.processed_gen2_samples_count + dynamic_heap_count_data_t::sample_size))
+ if (gc_index_full_gc_end >= (settings.gc_index - dynamic_heap_count_data_t::sample_size))
{
median_gen2_tcp_percent = dynamic_heap_count_data.get_median_gen2_gc_percent ();
}
@@ -25268,43 +25202,6 @@ int gc_heap::calculate_new_heap_count ()
}
float median_throughput_cost_percent = median_of_3 (throughput_cost_percents[0], throughput_cost_percents[1], throughput_cost_percents[2]);
- float avg_throughput_cost_percent = (float)((throughput_cost_percents[0] + throughput_cost_percents[1] + throughput_cost_percents[2]) / 3.0);
-
- // One of the reasons for outliers is something temporarily affected GC work. We pick the min tcp if the survival is very stable to avoid counting these outliers.
- float min_tcp = throughput_cost_percents[0];
- size_t min_survived = dynamic_heap_count_data.samples[0].gc_survived_size;
- uint64_t min_pause = dynamic_heap_count_data.samples[0].gc_pause_time;
- for (int i = 1; i < dynamic_heap_count_data_t::sample_size; i++)
- {
- min_tcp = min (throughput_cost_percents[i], min_tcp);
- min_survived = min (dynamic_heap_count_data.samples[i].gc_survived_size, min_survived);
- min_pause = min (dynamic_heap_count_data.samples[i].gc_pause_time, min_pause);
- }
-
- dprintf (6666, ("checking if samples are stable %Id %Id %Id, min tcp %.3f, min pause %I64d",
- dynamic_heap_count_data.samples[0].gc_survived_size, dynamic_heap_count_data.samples[1].gc_survived_size, dynamic_heap_count_data.samples[2].gc_survived_size,
- min_tcp, min_pause));
-
- bool survived_stable_p = true;
- if (min_survived > 0)
- {
- for (int i = 0; i < dynamic_heap_count_data_t::sample_size; i++)
- {
- dynamic_heap_count_data_t::sample& sample = dynamic_heap_count_data.samples[i];
- float diff = (float)(sample.gc_survived_size - min_survived) / (float)min_survived;
- dprintf (6666, ("sample %d diff from min is %Id -> %.3f", i, (sample.gc_survived_size - min_survived), diff));
- if (diff >= 0.15)
- {
- survived_stable_p = false;
- }
- }
- }
-
- if (survived_stable_p)
- {
- dprintf (6666, ("survived is stable, so we pick min tcp %.3f", min_tcp));
- median_throughput_cost_percent = min_tcp;
- }
// apply exponential smoothing and use 1/3 for the smoothing factor
const float smoothing = 3;
@@ -25319,13 +25216,10 @@ int gc_heap::calculate_new_heap_count ()
smoothed_median_throughput_cost_percent = median_throughput_cost_percent;
}
- dprintf (6666, ("median tcp: %.3f, smoothed tcp: %.3f, avg tcp: %.3f, gen2 tcp %.3f(%.3f, %.3f, %.3f)",
- median_throughput_cost_percent, smoothed_median_throughput_cost_percent, avg_throughput_cost_percent, median_gen2_tcp_percent,
- dynamic_heap_count_data.gen2_samples[0].gc_percent, dynamic_heap_count_data.gen2_samples[1].gc_percent, dynamic_heap_count_data.gen2_samples[2].gc_percent));
+ dprintf (6666, ("median tcp: %.3f, smoothed tcp: %.3f, gen2 tcp %.3f(%.3f, %.3f, %.3f)",
+ median_throughput_cost_percent, smoothed_median_throughput_cost_percent, median_gen2_tcp_percent,
+ dynamic_heap_count_data.gen2_gc_percents[0], dynamic_heap_count_data.gen2_gc_percents[1], dynamic_heap_count_data.gen2_gc_percents[2]));
- //
- // I'm keeping the old logic for now just to handle gen2.
- //
size_t heap_size = 0;
for (int i = 0; i < n_heaps; i++)
{
@@ -25353,9 +25247,7 @@ int gc_heap::calculate_new_heap_count ()
// we don't go all the way to the number of CPUs, but stay 1 or 2 short
int step_up = (n_heaps + 1) / 2;
int extra_heaps = 1 + (n_max_heaps >= 32);
- int actual_n_max_heaps = n_max_heaps - extra_heaps;
- int max_growth = max ((n_max_heaps / 4), 2);
- step_up = min (step_up, (actual_n_max_heaps - n_heaps));
+ step_up = min (step_up, n_max_heaps - extra_heaps - n_heaps);
// on the way down, we essentially divide the heap count by 1.5
int step_down = (n_heaps + 1) / 3;
@@ -25393,310 +25285,49 @@ int gc_heap::calculate_new_heap_count ()
dprintf (6666, ("stress %d -> %d", n_heaps, new_n_heaps));
#else //STRESS_DYNAMIC_HEAP_COUNT
int new_n_heaps = n_heaps;
-
- // target_tcp should be configurable.
- float target_tcp = 5.0;
- float target_gen2_tcp = 10.0;
- float log_base = (float)1.1;
-
- dynamic_heap_count_data.add_to_recorded_tcp (median_throughput_cost_percent);
-
- // This is the average of whatever is in the recorded tcp buffer.
- float avg_recorded_tcp = 0.0;
-
- if (process_eph_samples_p)
+ if (median_throughput_cost_percent > 10.0f)
{
- dynamic_heap_count_data.last_processed_stcp = smoothed_median_throughput_cost_percent;
-
- if ((median_throughput_cost_percent > 10.0f) || (smoothed_median_throughput_cost_percent > target_tcp))
+ // ramp up more agressively - use as many heaps as it would take to bring
+ // the tcp down to 5%
+ new_n_heaps = (int)(n_heaps * (median_throughput_cost_percent / 5.0));
+ dprintf (6666, ("[CHP0] tcp %.3f -> %d * %.3f = %d", median_throughput_cost_percent, n_heaps, (median_throughput_cost_percent / 5.0), new_n_heaps));
+ new_n_heaps = min (new_n_heaps, n_max_heaps - extra_heaps);
+ }
+ // if the median tcp is 10% or less, react slower
+ else if ((smoothed_median_throughput_cost_percent > 5.0f) || (median_gen2_tcp_percent > 10.0f))
+ {
+ if (smoothed_median_throughput_cost_percent > 5.0f)
{
- // If median is high but stcp is lower than target, and if this situation continues, stcp will quickly be above target anyway; otherwise
- // we treat it as an outlier.
- if (smoothed_median_throughput_cost_percent > target_tcp)
- {
- float step_up_percent = log_with_base ((smoothed_median_throughput_cost_percent - target_tcp + log_base), log_base);
- float step_up_float = (float)(step_up_percent / 100.0 * actual_n_max_heaps);
- int step_up_int = (int)step_up_float;
-
- dprintf (6666, ("[CHP0] inc %d(%.3f), last inc %d, %Id GCs elapsed, last stcp %.3f",
- step_up_int, step_up_float, (int)dynamic_heap_count_data.last_changed_count,
- (current_gc_index - dynamic_heap_count_data.last_changed_gc_index), dynamic_heap_count_data.last_changed_stcp));
-
- // Don't adjust if we just adjusted last time we checked, unless we are in an extreme situation.
- if ((smoothed_median_throughput_cost_percent < 20.0f) &&
- (avg_throughput_cost_percent < 20.0f) &&
- ((current_gc_index - dynamic_heap_count_data.last_changed_gc_index) < (2 * dynamic_heap_count_data_t::sample_size)))
- {
- dprintf (6666, ("[CHP0] we just adjusted %Id GCs ago, skipping", (current_gc_index - dynamic_heap_count_data.last_changed_gc_index)));
- }
- else
- {
- if (step_up_int)
- {
- if (dynamic_heap_count_data.dec_failure_count)
- {
- dprintf (6666, ("[CHP0] intending to grow, reset dec failure count (was %d)", dynamic_heap_count_data.dec_failure_count));
- dynamic_heap_count_data.dec_failure_count = 0;
- }
-
- if (((int)dynamic_heap_count_data.last_changed_count > 0) && (dynamic_heap_count_data.last_changed_gc_index > 0.0) &&
- ((current_gc_index - dynamic_heap_count_data.last_changed_gc_index) <= (3 * dynamic_heap_count_data_t::sample_size)))
- {
- dprintf (6666, ("[CHP0-0] just grew %d GCs ago, no change", (current_gc_index - dynamic_heap_count_data.last_changed_gc_index)));
- step_up_int = 0;
- }
- else
- {
- // If the calculation tells us to grow, we should check to see if the slope has been coming down rapidly, if so there's no reason to grow.
- int above_target_tcp_count = dynamic_heap_count_data.rearrange_recorded_tcp ();
- float above_target_tcp_slope = slope (dynamic_heap_count_data.recorded_tcp_rearranged, above_target_tcp_count, &avg_recorded_tcp);
- float diff_pct = (target_tcp - avg_recorded_tcp) / target_tcp;
- float adjusted_target_tcp = dynamic_heap_count_data.get_range_upper (target_tcp);
-
- dprintf (6666, ("[CHP0] slope of last %d samples is %.3f. avg %.3f (%.3f%%), current tcp %.3f, adjusted target is %.3f, failure count is %d",
- above_target_tcp_count, above_target_tcp_slope, avg_recorded_tcp, (diff_pct * 100.0),
- median_throughput_cost_percent, adjusted_target_tcp, dynamic_heap_count_data.inc_failure_count));
-
- if (dynamic_heap_count_data.is_tcp_in_range (diff_pct, above_target_tcp_slope))
- {
- step_up_int = 0;
- dprintf (6666, ("[CHP0-1] slope %.3f and already close to target %.3f (%.3f%%), no change", above_target_tcp_slope, avg_recorded_tcp, (diff_pct * 100.0)));
- }
- else
- {
- if (above_target_tcp_slope < 0.0)
- {
- // If we are already trending down and the tcp is small enough, just wait.
- if ((median_throughput_cost_percent < adjusted_target_tcp) || (avg_recorded_tcp < adjusted_target_tcp))
- {
- step_up_int = 0;
- dprintf (6666, ("[CHP0-2] trending down, slope is %.3f, tcp is %.3f, avg is %.3f, already below adjusted target %.3f, no change",
- above_target_tcp_slope, median_throughput_cost_percent, avg_recorded_tcp, adjusted_target_tcp));
- }
- }
- else
- {
- // We are trending up, but we have too few samples and the avg is already small enough.
- if ((above_target_tcp_count <= dynamic_heap_count_data.inc_recheck_threshold) && (avg_recorded_tcp < adjusted_target_tcp))
- {
- step_up_int = 0;
- dprintf (6666, ("[CHP0-3] trending up, only %d samples, slope is %.3f, avg is %.3f already below adjusted target %.3f, no change",
- above_target_tcp_count, above_target_tcp_slope, avg_recorded_tcp, adjusted_target_tcp));
- }
- }
- }
- }
-
- // If we still decided to grow, check if we need to grow aggressively.
- if (step_up_int)
- {
- if (((int)dynamic_heap_count_data.last_changed_count > 0) && (dynamic_heap_count_data.last_changed_gc_index > 0.0))
- {
- (dynamic_heap_count_data.inc_failure_count)++;
- dprintf (6666, ("[CHP0-4] just grew %d GCs ago, grow more aggressively from %d -> %d more heaps",
- (current_gc_index - dynamic_heap_count_data.last_changed_gc_index), step_up_int, (step_up_int * (dynamic_heap_count_data.inc_failure_count + 1))));
- step_up_int *= dynamic_heap_count_data.inc_failure_count + 1;
- }
- }
- }
-
- step_up_int = min (step_up_int, max_growth);
-
- new_n_heaps = n_heaps + step_up_int;
- new_n_heaps = min (new_n_heaps, actual_n_max_heaps);
-
- // If we are going to grow to be very close to max heap, it's better to just grow to it.
- if ((new_n_heaps < actual_n_max_heaps) && dynamic_heap_count_data.is_close_to_max (new_n_heaps, actual_n_max_heaps))
- {
- dprintf (6666, ("[CHP0-5] %d is close to max heaps %d, grow to max", new_n_heaps, actual_n_max_heaps));
- new_n_heaps = actual_n_max_heaps;
- }
-
- if (new_n_heaps > n_heaps)
- {
- dynamic_heap_count_data.last_changed_gc_index = current_gc_index;
- dynamic_heap_count_data.last_changed_count = step_up_float;
- dynamic_heap_count_data.last_changed_stcp = smoothed_median_throughput_cost_percent;
- }
-
- dprintf (6666, ("[CHP0] tcp %.3f, stcp %.3f -> (%d * %.3f%% = %.3f) -> %d + %d = %d -> %d",
- median_throughput_cost_percent, smoothed_median_throughput_cost_percent,
- actual_n_max_heaps, step_up_percent, step_up_float, step_up_int, n_heaps, (n_heaps + step_up_int), new_n_heaps));
- }
- }
+ dprintf (6666, ("[CHP1] stcp %.3f > 5, %d + %d = %d", smoothed_median_throughput_cost_percent, n_heaps, step_up, (n_heaps + step_up)));
}
else
{
- // When we are below target, we accumulate the distance to target and only adjust when we've accumulated enough in this state. Note that
- // this can include tcp's that are slightly above target, as long as it's not high enough for us to adjust the heap count. If we are just
- // oscillating around target, this makes those tcp's cancel each other out.
- if (dynamic_heap_count_data.below_target_accumulation == 0)
- {
- dynamic_heap_count_data.first_below_target_gc_index = current_gc_index;
- dynamic_heap_count_data.init_recorded_tcp ();
- dynamic_heap_count_data.add_to_recorded_tcp (median_throughput_cost_percent);
- }
- dprintf (6666, ("[CHP1] last time adjusted %s by %d at GC#%Id (%Id GCs since), stcp was %.3f, now stcp is %.3f",
- ((dynamic_heap_count_data.last_changed_count > 0.0) ? "up" : "down"), (int)dynamic_heap_count_data.last_changed_count,
- dynamic_heap_count_data.last_changed_gc_index, (current_gc_index - dynamic_heap_count_data.last_changed_gc_index),
- dynamic_heap_count_data.last_changed_stcp, smoothed_median_throughput_cost_percent));
-
- float below_target_diff = target_tcp - median_throughput_cost_percent;
- dynamic_heap_count_data.below_target_accumulation += below_target_diff;
-
- dprintf (6666, ("[CHP1] below target for the past %Id GCs, accumulated %.3f, min (10%% of max is %.2f, 20%% of hc is %.2f)",
- (current_gc_index - dynamic_heap_count_data.first_below_target_gc_index), dynamic_heap_count_data.below_target_accumulation,
- (actual_n_max_heaps * 0.1), (n_heaps * 0.2)));
-
- if (dynamic_heap_count_data.below_target_accumulation >= dynamic_heap_count_data.below_target_threshold)
- {
- int below_target_tcp_count = dynamic_heap_count_data.rearrange_recorded_tcp ();
- float below_target_tcp_slope = slope (dynamic_heap_count_data.recorded_tcp, below_target_tcp_count, &avg_recorded_tcp);
- float diff_pct = (target_tcp - smoothed_median_throughput_cost_percent) / target_tcp;
- int step_down_int = (int)(diff_pct / 2.0 * n_heaps);
- dprintf (6666, ("[CHP1] observed %d tcp's <= or ~ target, avg %.3f, slope %.3f, stcp %.3f below target, shrink by %.3f * %d = %d heaps",
- below_target_tcp_count, avg_recorded_tcp, below_target_tcp_slope, (diff_pct * 100.0), (diff_pct * 50.0), n_heaps, step_down_int));
-
- bool shrink_p = false;
- if (dynamic_heap_count_data.is_tcp_in_range (diff_pct, below_target_tcp_slope))
- {
- step_down_int = 0;
- dprintf (6666, ("[CHP1-0] slope %.3f is flat and stcp is already close to target %.3f (%.3f%%), no change",
- below_target_tcp_slope, smoothed_median_throughput_cost_percent, (diff_pct * 100.0)));
- }
- else
- {
- // If we adjusted last time and it was unsuccessful, we need to inc our failure count.
- // If we have a non zero failure count, we don't want to adjust for a while if we continue to be in that same situation.
- bool last_dec_p = (dynamic_heap_count_data.last_changed_gc_index > 0) && (dynamic_heap_count_data.last_changed_count < 0.0);
- float last_dec_tcp_diff_pct = (last_dec_p ?
- ((smoothed_median_throughput_cost_percent - dynamic_heap_count_data.last_changed_stcp) / dynamic_heap_count_data.last_changed_stcp) : 0.0f);
- bool stable_p = last_dec_p && ((last_dec_tcp_diff_pct <= 0.2) && (last_dec_tcp_diff_pct >= -0.2));
- dprintf (6666, ("[CHP1] since last adjustment stcp changed %.3f->%.3f = %.3f%%, %s, dec_failure_count is %d",
- dynamic_heap_count_data.last_changed_stcp, smoothed_median_throughput_cost_percent, (last_dec_tcp_diff_pct * 100.0),
- (stable_p ? "stable" : "not stable"), dynamic_heap_count_data.dec_failure_count));
-
- bool check_dec_p = true;
-
- if (stable_p)
- {
- if (dynamic_heap_count_data.dec_failure_count)
- {
- (dynamic_heap_count_data.dec_failure_count)++;
- }
- else
- {
- dynamic_heap_count_data.dec_failure_count = 1;
- }
-
- if (dynamic_heap_count_data.dec_failure_count <= dynamic_heap_count_data.dec_failure_recheck_threshold)
- {
- check_dec_p = false;
- dprintf (6666, ("[CHP1-1] dec was still unsuccessful, <= %d, no change", dynamic_heap_count_data.dec_failure_recheck_threshold));
- }
- }
-
- if (check_dec_p)
- {
- dynamic_heap_count_data.dec_failure_count = 0;
-
- if (below_target_tcp_slope <= 0.0)
- {
- shrink_p = true;
- }
- else
- {
- // It's trending upwards, but if takes too many samples to get to target, we do want to shrink.
- int num_samples_to_goal = (int)((target_tcp + below_target_tcp_slope - median_throughput_cost_percent) / below_target_tcp_slope);
- bool far_below_goal_p = (num_samples_to_goal > (3 * dynamic_heap_count_data_t::sample_size));
- dprintf (6666, ("[CHP1] it'll take ((%.3f + %.3f - %.3f) / %.3f = %d) samples to get to target, %s",
- target_tcp, below_target_tcp_slope, median_throughput_cost_percent, below_target_tcp_slope,
- num_samples_to_goal, (far_below_goal_p ? "shrink" : "no change")));
-
- if (far_below_goal_p)
- {
- // We could be in a situation where the slope changes directions but since we only compute one number, we take another look at
- // the samples to make a better assessment by looking at the highest tcps and if their average is close to target, we don't shrink.
- //
- // TODO - we only check this when the slope is going up but since this includes the situation where the slope changes directions
- // we should really be checking this regardless of the slope to handle that.
- float highest_avg_tcp = 0.0;
- int highest_count = dynamic_heap_count_data.highest_avg_recorded_tcp (below_target_tcp_count, avg_recorded_tcp, &highest_avg_tcp);
- float highest_count_pct = (float)highest_count / (float)below_target_tcp_count;
-
- shrink_p = (highest_count_pct < 0.3) || (highest_avg_tcp < (target_tcp * 0.8));
- dprintf (6666, ("[CHP1-2] %d samples were above avg (%.3f%%), their avg is %.3f (%s)",
- highest_count, (highest_count_pct * 100.0), highest_avg_tcp, (shrink_p ? "shrink" : "no change")));
- }
- }
- }
- }
-
- if (shrink_p && step_down_int && (new_n_heaps > step_down_int))
- {
- // TODO - if we see that it wants to shrink by 1 heap too many times, we do want to shrink.
- if (step_down_int == 1)
- {
- step_down_int = 0;
- dprintf (6666, ("[CHP1-3] don't shrink if it's just one heap. not worth it"));
- }
-
- new_n_heaps -= step_down_int;
- dprintf (6666, ("[CHP1] shrink by %d heaps -> %d", step_down_int, new_n_heaps));
- }
-
- // Always reinit the buffer as we want to look at the more recent history.
- dynamic_heap_count_data.init_recorded_tcp ();
- dynamic_heap_count_data.below_target_accumulation = 0;
- }
-
- if (new_n_heaps < n_heaps)
- {
- dynamic_heap_count_data.last_changed_gc_index = current_gc_index;
- dynamic_heap_count_data.last_changed_count = (float)(new_n_heaps - n_heaps);
- dynamic_heap_count_data.last_changed_stcp = smoothed_median_throughput_cost_percent;
- dprintf (6666, ("[CHP1] setting last changed gc index to %Id, count to %.3f, stcp to %.3f",
- dynamic_heap_count_data.last_changed_gc_index, dynamic_heap_count_data.last_changed_count, dynamic_heap_count_data.last_changed_stcp));
-
- if (dynamic_heap_count_data.inc_failure_count)
- {
- dprintf (6666, ("[CHP1] shrink, reset inc failure count (was %d)", dynamic_heap_count_data.inc_failure_count));
- dynamic_heap_count_data.inc_failure_count = 0;
- }
- }
+ dprintf (6666, ("[CHP2] tcp %.3f > 10, %d + %d = %d", median_gen2_tcp_percent, n_heaps, step_up, (n_heaps + step_up)));
}
+ new_n_heaps += step_up;
}
-
- if ((new_n_heaps == n_heaps) && !process_eph_samples_p && process_gen2_samples_p)
+ // if we can save at least 1% more in time than we spend in space, increase number of heaps
+ else if ((tcp_reduction_per_step_up - scp_increase_per_step_up) >= 1.0f)
{
- // The gen2 samples only serve as a backstop so this is quite crude.
- if (median_gen2_tcp_percent > target_gen2_tcp)
- {
- float step_up_percent = log_with_base ((median_gen2_tcp_percent - target_gen2_tcp + log_base), log_base);
- float step_up_float = (float)(step_up_percent / 100.0 * actual_n_max_heaps);
- new_n_heaps += (int)step_up_float;
- new_n_heaps = min (new_n_heaps, actual_n_max_heaps);
- dprintf (6666, ("[CHP2-0] gen2 tcp: %.3f, inc by %.3f%% = %d, %d -> %d", median_gen2_tcp_percent, step_up_percent, (int)step_up_float, n_heaps, new_n_heaps));
-
- if ((new_n_heaps < actual_n_max_heaps) && dynamic_heap_count_data.is_close_to_max (new_n_heaps, actual_n_max_heaps))
- {
- dprintf (6666, ("[CHP2-1] %d is close to max heaps %d, grow to max", new_n_heaps, actual_n_max_heaps));
- new_n_heaps = actual_n_max_heaps;
- }
- }
- else if ((dynamic_heap_count_data.last_processed_stcp < 1.0) &&
- (median_gen2_tcp_percent < (target_gen2_tcp / 2)) &&
- (scp_decrease_per_step_down - tcp_increase_per_step_down >= 1.0f))
- {
- new_n_heaps -= step_down;
- dprintf (6666, ("[CHP3-0] last eph stcp: %.3f, gen2 tcp: %.3f, dec by %d, %d -> %d",
- dynamic_heap_count_data.last_processed_stcp, median_gen2_tcp_percent, step_down, n_heaps, new_n_heaps));
- }
+ dprintf (6666, ("[CHP3] % .3f - % .3f = % .3f, % d + % d = % d",
+ tcp_reduction_per_step_up, scp_increase_per_step_up, (tcp_reduction_per_step_up - scp_increase_per_step_up),
+ n_heaps, step_up, (n_heaps + step_up)));
+ new_n_heaps += step_up;
+ }
+ // if we can save at least 1% more in space than we spend in time, decrease number of heaps
+ else if ((smoothed_median_throughput_cost_percent < 1.0f) &&
+ (median_gen2_tcp_percent < 5.0f) &&
+ ((scp_decrease_per_step_down - tcp_increase_per_step_down) >= 1.0f))
+ {
+ dprintf (6666, ("[CHP4] stcp %.3f tcp %.3f, %.3f - %.3f = %.3f, %d + %d = %d",
+ smoothed_median_throughput_cost_percent, median_gen2_tcp_percent,
+ scp_decrease_per_step_down, tcp_increase_per_step_down, (scp_decrease_per_step_down - tcp_increase_per_step_down),
+ n_heaps, step_up, (n_heaps + step_up)));
+ new_n_heaps -= step_down;
}
assert (new_n_heaps >= 1);
- assert (new_n_heaps <= actual_n_max_heaps);
-
+ assert (new_n_heaps <= n_max_heaps);
#endif //STRESS_DYNAMIC_HEAP_COUNT
// store data used for decision to emit in ETW event
@@ -25719,28 +25350,13 @@ int gc_heap::calculate_new_heap_count ()
dynamic_heap_count_data.scp_decrease_per_step_down
);
- if (process_eph_samples_p)
- {
- dprintf (6666, ("processed eph samples, updating processed %Id -> %Id", dynamic_heap_count_data.processed_samples_count, dynamic_heap_count_data.current_samples_count));
- dynamic_heap_count_data.processed_samples_count = dynamic_heap_count_data.current_samples_count;
- }
-
- if (process_gen2_samples_p)
- {
- dprintf (6666, ("processed gen2 samples, updating processed %Id -> %Id", dynamic_heap_count_data.processed_gen2_samples_count, dynamic_heap_count_data.current_gen2_samples_count));
- dynamic_heap_count_data.processed_gen2_samples_count = dynamic_heap_count_data.current_gen2_samples_count;
- }
+ dynamic_heap_count_data.prev_num_completed_gcs = num_completed_gcs;
if (new_n_heaps != n_heaps)
{
- dprintf (6666, ("GC#%Id should change! %d->%d (%s)",
- VolatileLoadWithoutBarrier (&settings.gc_index), n_heaps, new_n_heaps, ((n_heaps < new_n_heaps) ? "INC" : "DEC")));
+ dprintf (6666, ("should change! %d->%d", n_heaps, new_n_heaps));
dynamic_heap_count_data.heap_count_to_change_to = new_n_heaps;
dynamic_heap_count_data.should_change_heap_count = true;
- dynamic_heap_count_data.init_recorded_tcp ();
- dynamic_heap_count_data.below_target_accumulation = 0;
- dynamic_heap_count_data.first_below_target_gc_index = current_gc_index;
- dprintf (6666, ("CHANGING HC, resetting tcp index, below target"));
}
return new_n_heaps;
@@ -25773,7 +25389,7 @@ void gc_heap::check_heap_count ()
if (dynamic_heap_count_data.new_n_heaps != n_heaps)
{
- dprintf (6666, ("prep to change from %d to %d at GC#%Id", n_heaps, dynamic_heap_count_data.new_n_heaps, VolatileLoadWithoutBarrier (&settings.gc_index)));
+ dprintf (6666, ("prep to change from %d to %d", n_heaps, dynamic_heap_count_data.new_n_heaps));
if (!prepare_to_change_heap_count (dynamic_heap_count_data.new_n_heaps))
{
// we don't have sufficient resources - reset the new heap count
@@ -25783,15 +25399,11 @@ void gc_heap::check_heap_count ()
if (dynamic_heap_count_data.new_n_heaps == n_heaps)
{
- dynamic_heap_count_data.last_changed_gc_index = 0;
- dynamic_heap_count_data.last_changed_count = 0.0;
-
- dynamic_heap_count_data.processed_samples_count = dynamic_heap_count_data.current_samples_count;
- dynamic_heap_count_data.processed_gen2_samples_count = dynamic_heap_count_data.current_gen2_samples_count;
+ // heap count stays the same, no work to do
+ dynamic_heap_count_data.prev_num_completed_gcs = get_num_completed_gcs ();
dynamic_heap_count_data.should_change_heap_count = false;
- dprintf (6666, ("heap count stays the same %d, no work to do, set processed sample count to %Id",
- dynamic_heap_count_data.new_n_heaps, dynamic_heap_count_data.current_samples_count));
+ dprintf (6666, ("heap count stays the same %d, no work to do, set prev completed to %Id", dynamic_heap_count_data.new_n_heaps, dynamic_heap_count_data.prev_num_completed_gcs));
return;
}
@@ -25831,14 +25443,17 @@ void gc_heap::check_heap_count ()
int old_n_heaps = n_heaps;
+ (dynamic_heap_count_data.heap_count_change_count)++;
change_heap_count (dynamic_heap_count_data.new_n_heaps);
GCToEEInterface::RestartEE(TRUE);
dprintf (9999, ("h0 restarted EE"));
- dynamic_heap_count_data.smoothed_median_throughput_cost_percent = 0.0;
+ // we made changes to the heap count that will change the overhead,
+ // so change the smoothed overhead to reflect that
+ dynamic_heap_count_data.smoothed_median_throughput_cost_percent = dynamic_heap_count_data.smoothed_median_throughput_cost_percent / n_heaps * old_n_heaps;
- dprintf (6666, ("h0 finished changing, set should change to false!\n"));
+ dprintf (6666, ("h0 finished changing, set should change to false!"));
dynamic_heap_count_data.should_change_heap_count = false;
}
@@ -25978,8 +25593,6 @@ bool gc_heap::prepare_to_change_heap_count (int new_n_heaps)
bool gc_heap::change_heap_count (int new_n_heaps)
{
- uint64_t start_time = 0;
-
dprintf (9999, ("BEG heap%d changing %d->%d", heap_number, n_heaps, new_n_heaps));
// use this variable for clarity - n_heaps will change during the transition
@@ -26004,9 +25617,11 @@ bool gc_heap::change_heap_count (int new_n_heaps)
assert (dynamic_heap_count_data.new_n_heaps != old_n_heaps);
+ dprintf (9999, ("Waiting h0 heap%d changing %d->%d", heap_number, n_heaps, new_n_heaps));
+
if (heap_number == 0)
{
- start_time = GetHighPrecisionTimeStamp ();
+ dprintf (3, ("switching heap count from %d to %d heaps", old_n_heaps, new_n_heaps));
// spread finalization data out to heaps coming into service
// if this step fails, we can still continue
@@ -26212,7 +25827,6 @@ bool gc_heap::change_heap_count (int new_n_heaps)
gc_t_join.restart ();
}
}
-
#ifdef BACKGROUND_GC
// there should be no items in the bgc_alloc_lock
bgc_alloc_lock->check();
@@ -26223,31 +25837,23 @@ bool gc_heap::change_heap_count (int new_n_heaps)
{
// compute the total budget per generation over the old heaps
// and figure out what the new budget per heap is
- ptrdiff_t new_alloc_per_heap[total_generation_count];
- size_t desired_alloc_per_heap[total_generation_count];
+ ptrdiff_t budget_per_heap[total_generation_count];
for (int gen_idx = 0; gen_idx < total_generation_count; gen_idx++)
{
- ptrdiff_t total_new_alloc = 0;
- size_t total_desired_alloc = 0;
+ ptrdiff_t total_budget = 0;
for (int i = 0; i < old_n_heaps; i++)
{
gc_heap* hp = g_heaps[i];
dynamic_data* dd = hp->dynamic_data_of (gen_idx);
- total_new_alloc += dd_new_allocation (dd);
- total_desired_alloc += dd_desired_allocation (dd);
+ total_budget += dd_new_allocation (dd);
}
// distribute the total budget for this generation over all new heaps if we are increasing heap count,
// but keep the budget per heap if we are decreasing heap count
int max_n_heaps = max (old_n_heaps, new_n_heaps);
- new_alloc_per_heap[gen_idx] = Align (total_new_alloc / max_n_heaps, get_alignment_constant (gen_idx <= max_generation));
- desired_alloc_per_heap[gen_idx] = Align (total_desired_alloc / max_n_heaps, get_alignment_constant (gen_idx <= max_generation));
- size_t allocated_in_budget = total_desired_alloc - total_new_alloc;
- dprintf (6666, ("g%d: total budget %zd (%zd / heap), left in budget: %zd (%zd / heap), (allocated %Id, %.3f%%), min %zd",
- gen_idx, total_desired_alloc, desired_alloc_per_heap[gen_idx],
- total_new_alloc, new_alloc_per_heap[gen_idx],
- allocated_in_budget, ((double)allocated_in_budget * 100.0 / (double)total_desired_alloc),
- dd_min_size (g_heaps[0]->dynamic_data_of (gen_idx))));
+ budget_per_heap[gen_idx] = Align (total_budget/max_n_heaps, get_alignment_constant (gen_idx <= max_generation));
+
+ dprintf (6666, ("g%d: total budget: %zd budget per heap: %zd", gen_idx, total_budget, budget_per_heap[gen_idx]));
}
// distribute the new budget per heap over the new heaps
@@ -26258,10 +25864,10 @@ bool gc_heap::change_heap_count (int new_n_heaps)
for (int gen_idx = 0; gen_idx < total_generation_count; gen_idx++)
{
- // distribute the total leftover budget over all heaps.
+ // distribute the total budget over all heaps, but don't go below the min budget
dynamic_data* dd = hp->dynamic_data_of (gen_idx);
- dd_new_allocation (dd) = new_alloc_per_heap[gen_idx];
- dd_desired_allocation (dd) = max (desired_alloc_per_heap[gen_idx], dd_min_size (dd));
+ dd_new_allocation (dd) = max (budget_per_heap[gen_idx], (ptrdiff_t)dd_min_size (dd));
+ dd_desired_allocation (dd) = dd_new_allocation (dd);
// recompute dd_fragmentation and dd_current_size
generation* gen = hp->generation_of (gen_idx);
@@ -26270,11 +25876,10 @@ bool gc_heap::change_heap_count (int new_n_heaps)
assert (gen_size >= dd_fragmentation (dd));
dd_current_size (dd) = gen_size - dd_fragmentation (dd);
- dprintf (3, ("h%d g%d: budget: %zd, left in budget: %zd, %zd generation_size: %zd fragmentation: %zd current_size: %zd",
+ dprintf (6666, ("h%d g%d: new allocation: %zd generation_size: %zd fragmentation: %zd current_size: %zd",
i,
gen_idx,
- desired_alloc_per_heap[gen_idx],
- new_alloc_per_heap[gen_idx],
+ dd_new_allocation (dd),
gen_size,
dd_fragmentation (dd),
dd_current_size (dd)));
@@ -26311,11 +25916,6 @@ bool gc_heap::change_heap_count (int new_n_heaps)
}
}
- if (heap_number == 0)
- {
- change_heap_count_time = GetHighPrecisionTimeStamp() - start_time;
- }
-
return true;
}
@@ -48805,10 +48405,6 @@ HRESULT GCHeap::Initialize()
// This needs to be different from our initial heap count so we can make sure we wait for
// the idle threads correctly in gc_thread_function.
gc_heap::dynamic_heap_count_data.last_n_heaps = 0;
- // This should be adjusted based on the target tcp. See comments in gcpriv.h
- gc_heap::dynamic_heap_count_data.below_target_threshold = 10.0;
- gc_heap::dynamic_heap_count_data.inc_recheck_threshold = 5;
- gc_heap::dynamic_heap_count_data.dec_failure_recheck_threshold = 5;
}
#endif //DYNAMIC_HEAP_COUNT
GCScan::GcRuntimeStructuresValid (TRUE);
diff --git a/src/coreclr/gc/gcpriv.h b/src/coreclr/gc/gcpriv.h
index 788cbff9f5e507..71dc19b9f0c676 100644
--- a/src/coreclr/gc/gcpriv.h
+++ b/src/coreclr/gc/gcpriv.h
@@ -2556,6 +2556,8 @@ class gc_heap
// re-initialize a heap in preparation to putting it back into service
PER_HEAP_METHOD void recommission_heap();
+ PER_HEAP_ISOLATED_METHOD size_t get_num_completed_gcs();
+
PER_HEAP_ISOLATED_METHOD int calculate_new_heap_count();
// check if we should change the heap count
@@ -4236,166 +4238,21 @@ class gc_heap
struct dynamic_heap_count_data_t
{
static const int sample_size = 3;
- static const int recorded_tcp_array_size = 64;
struct sample
{
uint64_t elapsed_between_gcs; // time between gcs in microseconds (this should really be between_pauses)
uint64_t gc_pause_time; // pause time for this GC
uint64_t msl_wait_time;
- size_t gc_survived_size;
};
uint32_t sample_index;
sample samples[sample_size];
-
- size_t current_samples_count;
- size_t processed_samples_count;
-
- //
- // We need to observe the history of tcp's so record them in a small buffer.
- //
- float recorded_tcp_rearranged[recorded_tcp_array_size];
- float recorded_tcp[recorded_tcp_array_size];
- int recorded_tcp_index;
- int total_recorded_tcp;
-
- int add_to_recorded_tcp (float tcp)
- {
- total_recorded_tcp++;
-
- recorded_tcp[recorded_tcp_index] = tcp;
- recorded_tcp_index++;
- if (recorded_tcp_index == recorded_tcp_array_size)
- {
- recorded_tcp_index = 0;
- }
-
- return recorded_tcp_index;
- }
-
- int rearrange_recorded_tcp ()
- {
- int count = recorded_tcp_array_size;
- int copied_count = 0;
-
- if (total_recorded_tcp >= recorded_tcp_array_size)
- {
- int earlier_entry_size = recorded_tcp_array_size - recorded_tcp_index;
- memcpy (recorded_tcp_rearranged, (recorded_tcp + recorded_tcp_index), (earlier_entry_size * sizeof (float)));
-
- copied_count = earlier_entry_size;
- }
-
- if (recorded_tcp_index)
- {
- memcpy ((recorded_tcp_rearranged + copied_count), recorded_tcp, (recorded_tcp_index * sizeof (float)));
- copied_count += recorded_tcp_index;
- }
-
- return copied_count;
- }
-
- int highest_avg_recorded_tcp (int count, float avg, float* highest_avg)
- {
- float highest_sum = 0.0;
- int highest_count = 0;
-
- for (int i = 0; i < count; i++)
- {
- if (recorded_tcp_rearranged[i] > avg)
- {
- highest_count++;
- highest_sum += recorded_tcp_rearranged[i];
- }
- }
-
- if (highest_count)
- {
- *highest_avg = highest_sum / highest_count;
- }
-
- return highest_count;
- }
-
- void init_recorded_tcp ()
- {
- total_recorded_tcp = 0;
- recorded_tcp_index = 0;
- dprintf (6666, ("INIT tcp buffer"));
- }
-
- int get_recorded_tcp_count () { return total_recorded_tcp; }
-
- //
- // Maintain some info about last time we did change heap count.
- //
- size_t last_changed_gc_index;
- // This is intentionally kept as a float for precision.
- float last_changed_count;
- float last_changed_stcp;
-
- //
- // For tuning above/below target tcp.
- //
- // If we just increased the heap count and immediately need to grow again, that counts as a failure.
- // The higher the failure count, the more aggressive we should grow.
- int inc_failure_count;
-
- // If we are trending up and the tcp is already close enough to target, we need this many samples
- // before we adjust.
- int inc_recheck_threshold;
-
- // If we shrink and the stcp doesn't change much, that counts as a failure. For the below target case
- // it's fine to stay here for a while. Either it'll naturally change and break out of this situation
- // or we wait for a while before we re-evaluate. How long we wait is defined by dec_recheck_threshold
- // each time our calculation tells us to shrink.
- int dec_failure_count;
- int dec_failure_recheck_threshold;
-
- // If we continue to be below target for an extended period of time, ie, we've accumulated more than
- // below_target_threshold, we want to reduce the heap count.
- float below_target_accumulation;
- float below_target_threshold;
-
- // Currently only used for dprintf.
- size_t first_below_target_gc_index;
-
- float get_range_upper (float t)
- {
- return (t * 1.2f);
- }
-
- bool is_tcp_in_range (float diff_pct, float slope)
- {
- return ((diff_pct <= 0.2) && (diff_pct >= -0.2) && (slope <= 0.1) && (slope >= -0.1));
- }
-
- bool is_close_to_max (int new_n, int max)
- {
- return ((max - new_n) <= (max / 10));
- }
-
- //
- // gen2 GCs are handled separately only as a backstop.
- //
- struct gen2_sample
- {
- // Recording the gen2 GC indices so we know how far apart they are. Currently unused
- // but we should consider how much value there is if they are very far apart.
- size_t gc_index;
- // This is (gc_elapsed_time / time inbetween this and the last gen2 GC)
- float gc_percent;
- };
+ size_t prev_num_completed_gcs;
uint32_t gen2_sample_index;
- gen2_sample gen2_samples[sample_size];
-
- size_t current_gen2_samples_count;
- size_t processed_gen2_samples_count;
-
- // This records the stcp last time we processed ephemeral samples. We use it
- float last_processed_stcp;
+ // This is (gc_elapsed_time / time inbetween this and the last gen2 GC)
+ float gen2_gc_percents[sample_size];
float median_throughput_cost_percent; // estimated overhead of allocator + gc
float smoothed_median_throughput_cost_percent; // exponentially smoothed version
@@ -4414,13 +4271,14 @@ class gc_heap
bool should_change_heap_count;
int heap_count_to_change_to;
+ int heap_count_change_count;
#ifdef STRESS_DYNAMIC_HEAP_COUNT
int lowest_heap_with_msl_uoh;
#endif //STRESS_DYNAMIC_HEAP_COUNT
float get_median_gen2_gc_percent()
{
- return median_of_3 (gen2_samples[0].gc_percent, gen2_samples[1].gc_percent, gen2_samples[2].gc_percent);
+ return median_of_3 (gen2_gc_percents[0], gen2_gc_percents[1], gen2_gc_percents[2]);
}
};
PER_HEAP_ISOLATED_FIELD_MAINTAINED dynamic_heap_count_data_t dynamic_heap_count_data;
@@ -4617,9 +4475,6 @@ class gc_heap
// at the beginning of a BGC and the PM triggered full GCs
// fall into this case.
PER_HEAP_ISOLATED_FIELD_DIAG_ONLY uint64_t suspended_start_time;
- // Right now this is diag only but may be used functionally later.
- PER_HEAP_ISOLATED_FIELD_DIAG_ONLY uint64_t change_heap_count_time;
- // TEMP END
PER_HEAP_ISOLATED_FIELD_DIAG_ONLY uint64_t end_gc_time;
PER_HEAP_ISOLATED_FIELD_DIAG_ONLY uint64_t total_suspended_time;
PER_HEAP_ISOLATED_FIELD_DIAG_ONLY uint64_t process_start_time;
diff --git a/src/coreclr/gc/unix/cgroup.cpp b/src/coreclr/gc/unix/cgroup.cpp
index 3be77727363920..d2ad75bbf787b5 100644
--- a/src/coreclr/gc/unix/cgroup.cpp
+++ b/src/coreclr/gc/unix/cgroup.cpp
@@ -39,6 +39,7 @@ Module Name:
#endif
#define CGROUP2_SUPER_MAGIC 0x63677270
+#define TMPFS_MAGIC 0x01021994
#define PROC_MOUNTINFO_FILENAME "/proc/self/mountinfo"
#define PROC_CGROUP_FILENAME "/proc/self/cgroup"
@@ -128,16 +129,12 @@ class CGroup
if (result != 0)
return 0;
- if (stats.f_type == CGROUP2_SUPER_MAGIC)
+ switch (stats.f_type)
{
- return 2;
- }
- else
- {
- // Assume that if /sys/fs/cgroup exists and the file system type is not cgroup2fs,
- // it is cgroup v1. Typically the file system type is tmpfs, but other values have
- // been seen in the wild.
- return 1;
+ case TMPFS_MAGIC: return 1;
+ case CGROUP2_SUPER_MAGIC: return 2;
+ default:
+ return 0;
}
#endif
}
diff --git a/src/coreclr/ilasm/asmparse.y b/src/coreclr/ilasm/asmparse.y
index c9861d58d79740..73ef9a892b5efb 100644
--- a/src/coreclr/ilasm/asmparse.y
+++ b/src/coreclr/ilasm/asmparse.y
@@ -486,7 +486,7 @@ typarAttrib : '+' { $$ = gpCovariant;
| '-' { $$ = gpContravariant; }
| CLASS_ { $$ = gpReferenceTypeConstraint; }
| VALUETYPE_ { $$ = gpNotNullableValueTypeConstraint; }
- | BYREFLIKE_ { $$ = gpAllowByRefLike; }
+ | BYREFLIKE_ { $$ = gpAcceptByRefLike; }
| _CTOR { $$ = gpDefaultConstructorConstraint; }
| FLAGS_ '(' int32 ')' { $$ = (CorGenericParamAttr)$3; }
;
diff --git a/src/coreclr/ilasm/prebuilt/asmparse.cpp b/src/coreclr/ilasm/prebuilt/asmparse.cpp
index 08f686f290187e..6bf91f56c57f44 100644
--- a/src/coreclr/ilasm/prebuilt/asmparse.cpp
+++ b/src/coreclr/ilasm/prebuilt/asmparse.cpp
@@ -2523,7 +2523,7 @@ case 152:
{ yyval.int32 = gpNotNullableValueTypeConstraint; } break;
case 153:
#line 489 "asmparse.y"
-{ yyval.int32 = gpAllowByRefLike; } break;
+{ yyval.int32 = gpAcceptByRefLike; } break;
case 154:
#line 490 "asmparse.y"
{ yyval.int32 = gpDefaultConstructorConstraint; } break;
diff --git a/src/coreclr/ildasm/dasm.cpp b/src/coreclr/ildasm/dasm.cpp
index 21dff99a381233..be95e36fa5d53f 100644
--- a/src/coreclr/ildasm/dasm.cpp
+++ b/src/coreclr/ildasm/dasm.cpp
@@ -3081,7 +3081,7 @@ char *DumpGenericPars(_Inout_updates_(SZSTRING_SIZE) char* szString, mdToken tok
if ((attr & gpNotNullableValueTypeConstraint) != 0)
szptr += sprintf_s(szptr,SZSTRING_REMAINING_SIZE(szptr), "valuetype ");
CHECK_REMAINING_SIZE;
- if ((attr & gpAllowByRefLike) != 0)
+ if ((attr & gpAcceptByRefLike) != 0)
szptr += sprintf_s(szptr,SZSTRING_REMAINING_SIZE(szptr), "byreflike ");
CHECK_REMAINING_SIZE;
if ((attr & gpDefaultConstructorConstraint) != 0)
diff --git a/src/coreclr/inc/CrstTypes.def b/src/coreclr/inc/CrstTypes.def
index 7f94e9e0996a83..3bccb73e03a599 100644
--- a/src/coreclr/inc/CrstTypes.def
+++ b/src/coreclr/inc/CrstTypes.def
@@ -367,7 +367,7 @@ Crst PendingTypeLoadEntry
DomainLocalBlock Exception ExecuteManRangeLock FuncPtrStubs
FusionAppCtx GlobalStrLiteralMap HandleTable IbcProfile
IJWFixupData IJWHash ISymUnmanagedReader Jit JumpStubCache LoaderHeap
- Module ModuleLookupTable PEImage
+ Module ModuleLookupTable PEImage SecurityStackwalkCache
SigConvert SingleUseLock StubDispatchCache StubUnwindInfoHeapSegments
SyncBlockCache SystemDomain ThreadIdDispenser ThreadStore TypeIDMap UnresolvedClassLock
SameLevelAs PendingTypeLoadEntry
@@ -426,6 +426,9 @@ End
Crst SaveModuleProfileData
End
+Crst SecurityStackwalkCache
+End
+
Crst SigConvert
AcquiredBefore LoaderHeap
End
diff --git a/src/coreclr/inc/bitvector.h b/src/coreclr/inc/bitvector.h
index 0f17697dddce74..df06b4c75c66ec 100644
--- a/src/coreclr/inc/bitvector.h
+++ b/src/coreclr/inc/bitvector.h
@@ -32,9 +32,7 @@
#define UNDEF_ASSERTE
#endif
-#ifndef FEATURE_NATIVEAOT
#define USE_BITVECTOR 1
-#endif
#if USE_BITVECTOR
/* The bitvector class is meant to be a drop in replacement for an integer
diff --git a/src/coreclr/inc/check.h b/src/coreclr/inc/check.h
index 6951e2a41837b6..c1ac08016d836f 100644
--- a/src/coreclr/inc/check.h
+++ b/src/coreclr/inc/check.h
@@ -684,9 +684,6 @@ CHECK CheckAligned(UINT value, UINT alignment);
CHECK CheckAligned(ULONG value, UINT alignment);
#endif
CHECK CheckAligned(UINT64 value, UINT alignment);
-#ifdef __APPLE__
-CHECK CheckAligned(SIZE_T value, UINT alignment);
-#endif
CHECK CheckAligned(const void *address, UINT alignment);
CHECK CheckOverflow(UINT value1, UINT value2);
@@ -694,9 +691,6 @@ CHECK CheckOverflow(UINT value1, UINT value2);
CHECK CheckOverflow(ULONG value1, ULONG value2);
#endif
CHECK CheckOverflow(UINT64 value1, UINT64 value2);
-#ifdef __APPLE__
-CHECK CheckOverflow(SIZE_T value1, SIZE_T value2);
-#endif
CHECK CheckOverflow(PTR_CVOID address, UINT offset);
#if defined(_MSC_VER)
CHECK CheckOverflow(const void *address, ULONG offset);
@@ -708,17 +702,11 @@ CHECK CheckUnderflow(UINT value1, UINT value2);
CHECK CheckUnderflow(ULONG value1, ULONG value2);
#endif
CHECK CheckUnderflow(UINT64 value1, UINT64 value2);
-#ifdef __APPLE__
-CHECK CheckUnderflow(SIZE_T value1, SIZE_T value2);
-#endif
CHECK CheckUnderflow(const void *address, UINT offset);
#if defined(_MSC_VER)
CHECK CheckUnderflow(const void *address, ULONG offset);
#endif
CHECK CheckUnderflow(const void *address, UINT64 offset);
-#ifdef __APPLE__
-CHECK CheckUnderflow(const void *address, SIZE_T offset);
-#endif
CHECK CheckUnderflow(const void *address, void *address2);
CHECK CheckZeroedMemory(const void *memory, SIZE_T size);
diff --git a/src/coreclr/inc/check.inl b/src/coreclr/inc/check.inl
index 34a2956d1be6e2..9296c48f7a7a3d 100644
--- a/src/coreclr/inc/check.inl
+++ b/src/coreclr/inc/check.inl
@@ -156,15 +156,6 @@ inline CHECK CheckAligned(UINT64 value, UINT alignment)
CHECK_OK;
}
-#ifdef __APPLE__
-inline CHECK CheckAligned(SIZE_T value, UINT alignment)
-{
- STATIC_CONTRACT_WRAPPER;
- CHECK(AlignmentTrim(value, alignment) == 0);
- CHECK_OK;
-}
-#endif
-
inline CHECK CheckAligned(const void *address, UINT alignment)
{
STATIC_CONTRACT_WRAPPER;
@@ -192,14 +183,6 @@ inline CHECK CheckOverflow(UINT64 value1, UINT64 value2)
CHECK_OK;
}
-#ifdef __APPLE__
-inline CHECK CheckOverflow(SIZE_T value1, SIZE_T value2)
-{
- CHECK(value1 + value2 >= value1);
- CHECK_OK;
-}
-#endif
-
inline CHECK CheckOverflow(PTR_CVOID address, UINT offset)
{
TADDR targetAddr = dac_cast(address);
@@ -271,15 +254,6 @@ inline CHECK CheckUnderflow(UINT64 value1, UINT64 value2)
CHECK_OK;
}
-#ifdef __APPLE__
-inline CHECK CheckUnderflow(SIZE_T value1, SIZE_T value2)
-{
- CHECK(value1 - value2 <= value1);
-
- CHECK_OK;
-}
-#endif
-
inline CHECK CheckUnderflow(const void *address, UINT offset)
{
#if POINTER_BITS == 32
@@ -316,20 +290,6 @@ inline CHECK CheckUnderflow(const void *address, UINT64 offset)
CHECK_OK;
}
-#ifdef __APPLE__
-inline CHECK CheckUnderflow(const void *address, SIZE_T offset)
-{
-#if POINTER_BITS == 32
- CHECK(offset >> 32 == 0);
- CHECK((UINT) (SIZE_T) address - (UINT) offset <= (UINT) (SIZE_T) address);
-#else
- CHECK((UINT64) address - offset <= (UINT64) address);
-#endif
-
- CHECK_OK;
-}
-#endif
-
inline CHECK CheckUnderflow(const void *address, void *address2)
{
#if POINTER_BITS == 32
diff --git a/src/coreclr/inc/clr_std/type_traits b/src/coreclr/inc/clr_std/type_traits
index ba007c32d9fef2..12af99d5c4fee1 100644
--- a/src/coreclr/inc/clr_std/type_traits
+++ b/src/coreclr/inc/clr_std/type_traits
@@ -358,7 +358,7 @@ namespace std
// On Unix 'long' is a 64-bit type (same as __int64) and the following two definitions
// conflict with _Is_integral and _Is_integral.
-#if !defined(HOST_UNIX) || defined(__APPLE__)
+#ifndef HOST_UNIX
template<>
struct _Is_integral
: true_type
@@ -370,7 +370,7 @@ namespace std
: true_type
{ // determine whether _Ty is integral
};
-#endif /* !HOST_UNIX || __APPLE__ */
+#endif /* HOST_UNIX */
#if _HAS_CHAR16_T_LANGUAGE_SUPPORT
template<>
diff --git a/src/coreclr/inc/clrconfignocache.h b/src/coreclr/inc/clrconfignocache.h
index 01675a24201d14..f75504a2289af0 100644
--- a/src/coreclr/inc/clrconfignocache.h
+++ b/src/coreclr/inc/clrconfignocache.h
@@ -46,8 +46,6 @@ class CLRConfigNoCache
{
return false;
}
-
- result = (DWORD)rawResult;
bool fSuccess = endPtr != _value;
return fSuccess;
}
diff --git a/src/coreclr/inc/clrconfigvalues.h b/src/coreclr/inc/clrconfigvalues.h
index d9571f0776456f..30956bf4a67418 100644
--- a/src/coreclr/inc/clrconfigvalues.h
+++ b/src/coreclr/inc/clrconfigvalues.h
@@ -259,7 +259,7 @@ RETAIL_CONFIG_DWORD_INFO(UNSUPPORTED_legacyCorruptedStateExceptionsPolicy, W("le
CONFIG_DWORD_INFO(INTERNAL_SuppressLostExceptionTypeAssert, W("SuppressLostExceptionTypeAssert"), 0, "")
RETAIL_CONFIG_DWORD_INFO(INTERNAL_UseEntryPointFilter, W("UseEntryPointFilter"), 0, "")
RETAIL_CONFIG_DWORD_INFO(INTERNAL_Corhost_Swallow_Uncaught_Exceptions, W("Corhost_Swallow_Uncaught_Exceptions"), 0, "")
-RETAIL_CONFIG_DWORD_INFO(EXTERNAL_LegacyExceptionHandling, W("LegacyExceptionHandling"), 1, "Enable legacy exception handling.");
+RETAIL_CONFIG_DWORD_INFO(EXTERNAL_EnableNewExceptionHandling, W("EnableNewExceptionHandling"), 0, "Enable new exception handling.");
///
diff --git a/src/coreclr/inc/clrnt.h b/src/coreclr/inc/clrnt.h
index fb7d8102545625..56245ea46f25e7 100644
--- a/src/coreclr/inc/clrnt.h
+++ b/src/coreclr/inc/clrnt.h
@@ -184,17 +184,12 @@ RtlVirtualUnwind_Unsafe(
#ifdef HOST_X86
typedef struct _RUNTIME_FUNCTION {
DWORD BeginAddress;
- // NOTE: R2R doesn't include EndAddress (see docs/design/coreclr/botr/readytorun-format.md).
- // NativeAOT does include the EndAddress because the Microsoft linker expects it. In NativeAOT
- // the info is generated in the managed ObjectWriter, so the structures don't have to match.
- // DWORD EndAddress;
DWORD UnwindData;
} RUNTIME_FUNCTION, *PRUNTIME_FUNCTION;
typedef struct _DISPATCHER_CONTEXT {
_EXCEPTION_REGISTRATION_RECORD* RegistrationPointer;
} DISPATCHER_CONTEXT, *PDISPATCHER_CONTEXT;
-
#endif // HOST_X86
#endif // !HOST_UNIX
@@ -212,7 +207,7 @@ RtlpGetFunctionEndAddress (
_In_ TADDR ImageBase
)
{
- PUNWIND_INFO pUnwindInfo = (PUNWIND_INFO)(ImageBase + FunctionEntry->UnwindData);
+ PTR_UNWIND_INFO pUnwindInfo = (PTR_UNWIND_INFO)(ImageBase + FunctionEntry->UnwindData);
return FunctionEntry->BeginAddress + pUnwindInfo->FunctionLength;
}
@@ -223,7 +218,10 @@ RtlpGetFunctionEndAddress (
#define RUNTIME_FUNCTION__SetUnwindInfoAddress(prf, addr) do { (prf)->UnwindData = (addr); } while(0)
#ifdef HOST_X86
+EXTERN_C
+NTSYSAPI
PEXCEPTION_ROUTINE
+NTAPI
RtlVirtualUnwind (
_In_ DWORD HandlerType,
_In_ DWORD ImageBase,
diff --git a/src/coreclr/inc/clrtypes.h b/src/coreclr/inc/clrtypes.h
index 9094e4932a2527..19e9720b34d90b 100644
--- a/src/coreclr/inc/clrtypes.h
+++ b/src/coreclr/inc/clrtypes.h
@@ -370,15 +370,6 @@ inline UINT64 AlignDown(UINT64 value, UINT alignment)
return (value&~(UINT64)(alignment-1));
}
-#ifdef __APPLE__
-inline SIZE_T AlignDown(SIZE_T value, UINT alignment)
-{
- STATIC_CONTRACT_LEAF;
- STATIC_CONTRACT_SUPPORTS_DAC;
- return (value&~(SIZE_T)(alignment-1));
-}
-#endif // __APPLE__
-
inline UINT AlignmentPad(UINT value, UINT alignment)
{
STATIC_CONTRACT_WRAPPER;
diff --git a/src/coreclr/inc/cordebuginfo.h b/src/coreclr/inc/cordebuginfo.h
index b3125060f308eb..09fb19f4bb8734 100644
--- a/src/coreclr/inc/cordebuginfo.h
+++ b/src/coreclr/inc/cordebuginfo.h
@@ -2,7 +2,7 @@
// The .NET Foundation licenses this file to you under the MIT license.
//
-// Keep in sync with src\coreclr\tools\Common\JitInterface\CorInfoTypes.VarInfo.cs
+// Keep in sync with llvm/tools/objwriter/cordebuginfo.h in current objwriter branch in https://github.com/dotnet/llvm-project repo
//
/**********************************************************************************/
diff --git a/src/coreclr/inc/corhdr.h b/src/coreclr/inc/corhdr.h
index c12c1cfdd4f710..3f67b33da9162a 100644
--- a/src/coreclr/inc/corhdr.h
+++ b/src/coreclr/inc/corhdr.h
@@ -847,7 +847,7 @@ typedef enum CorGenericParamAttr
gpReferenceTypeConstraint = 0x0004, // type argument must be a reference type
gpNotNullableValueTypeConstraint = 0x0008, // type argument must be a value type but not Nullable
gpDefaultConstructorConstraint = 0x0010, // type argument must have a public default constructor
- gpAllowByRefLike = 0x0020, // type argument can be ByRefLike
+ gpAcceptByRefLike = 0x0020, // type argument can be ByRefLike
} CorGenericParamAttr;
// structures and enums moved from COR.H
diff --git a/src/coreclr/inc/corinfo.h b/src/coreclr/inc/corinfo.h
index b9ebaa08546014..86680d6e20c91e 100644
--- a/src/coreclr/inc/corinfo.h
+++ b/src/coreclr/inc/corinfo.h
@@ -572,10 +572,7 @@ enum CorInfoHelpFunc
CORINFO_HELP_INIT_PINVOKE_FRAME, // initialize an inlined PInvoke Frame for the JIT-compiler
CORINFO_HELP_MEMSET, // Init block of memory
- CORINFO_HELP_MEMZERO, // Init block of memory with zeroes
CORINFO_HELP_MEMCPY, // Copy block of memory
- CORINFO_HELP_NATIVE_MEMSET, // Init block of memory using native memset (not safe for pDst being null,
- // not safe for unbounded size, does not trigger GC)
CORINFO_HELP_RUNTIMEHANDLE_METHOD, // determine a type/field/method handle at run-time
CORINFO_HELP_RUNTIMEHANDLE_METHOD_LOG, // determine a type/field/method handle at run-time, with IBC logging
@@ -1982,16 +1979,6 @@ enum class GetTypeLayoutResult
Failure,
};
-#define MAX_SWIFT_LOWERED_ELEMENTS 4
-
-struct CORINFO_SWIFT_LOWERING
-{
- bool byReference;
- CorInfoType loweredElements[MAX_SWIFT_LOWERED_ELEMENTS];
- uint32_t offsets[MAX_SWIFT_LOWERED_ELEMENTS];
- size_t numLoweredElements;
-};
-
#define SIZEOF__CORINFO_Object TARGET_POINTER_SIZE /* methTable */
#define CORINFO_Array_MaxLength 0x7FFFFFC7
@@ -2071,7 +2058,7 @@ class ICorStaticInfo
// Example of a scenario addressed by notifyMethodInfoUsage:
// 1) Crossgen (with --opt-cross-module=MyLib) attempts to inline a call from MyLib.dll into MyApp.dll
// and realizes that the call always throws.
- // 2) JIT aborts the inlining attempt and marks the call as no-return instead. The code that follows the call is
+ // 2) JIT aborts the inlining attempt and marks the call as no-return instead. The code that follows the call is
// replaced with a breakpoint instruction that is expected to be unreachable.
// 3) MyLib is updated to a new version so it's no longer within the same version bubble with MyApp.dll
// and the new version of the call no longer throws and does some work.
@@ -2231,7 +2218,6 @@ class ICorStaticInfo
// should be looked up at runtime.
virtual void expandRawHandleIntrinsic(
CORINFO_RESOLVED_TOKEN * pResolvedToken,
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_GENERICHANDLE_RESULT * pResult) = 0;
// Is the given type in System.Private.Corelib and marked with IntrinsicAttribute?
@@ -2648,7 +2634,6 @@ class ICorStaticInfo
CORINFO_RESOLVED_TOKEN * pResolvedToken,
CORINFO_LOOKUP_KIND * pGenericLookupKind,
CorInfoHelpFunc id,
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_CONST_LOOKUP * pLookup
) = 0;
@@ -2656,7 +2641,6 @@ class ICorStaticInfo
CORINFO_RESOLVED_TOKEN * pTargetMethod,
mdToken targetConstraint,
CORINFO_CLASS_HANDLE delegateType,
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_LOOKUP * pLookup
) = 0;
@@ -2932,13 +2916,6 @@ class ICorStaticInfo
uint32_t numMappings // [IN] Number of rich mappings
) = 0;
- // Report back some metadata about the compilation to the EE -- for
- // example, metrics about the compilation.
- virtual void reportMetadata(
- const char* key,
- const void* value,
- size_t length) = 0;
-
/*-------------------------- Misc ---------------------------------------*/
// Used to allocate memory that needs to handed to the EE.
@@ -3083,9 +3060,6 @@ class ICorStaticInfo
SYSTEMV_AMD64_CORINFO_STRUCT_REG_PASSING_DESCRIPTOR* structPassInRegDescPtr /* OUT */
) = 0;
- // Classifies a swift structure into primitives or an implicit byref for ABI purposes.
- virtual void getSwiftLowering(CORINFO_CLASS_HANDLE structHnd, CORINFO_SWIFT_LOWERING* pLowering) = 0;
-
virtual uint32_t getLoongArch64PassStructInRegisterFlags(CORINFO_CLASS_HANDLE cls) = 0;
virtual uint32_t getRISCV64PassStructInRegisterFlags(CORINFO_CLASS_HANDLE cls) = 0;
};
@@ -3196,7 +3170,6 @@ class ICorDynamicInfo : public ICorStaticInfo
virtual void embedGenericHandle(
CORINFO_RESOLVED_TOKEN * pResolvedToken,
bool fEmbedParent, // `true` - embeds parent type handle of the field/method handle
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_GENERICHANDLE_RESULT * pResult
) = 0;
diff --git a/src/coreclr/inc/crsttypes_generated.h b/src/coreclr/inc/crsttypes_generated.h
index 79864b97db018c..70847a5b367fcd 100644
--- a/src/coreclr/inc/crsttypes_generated.h
+++ b/src/coreclr/inc/crsttypes_generated.h
@@ -107,33 +107,34 @@ enum CrstType
CrstRetThunkCache = 89,
CrstSavedExceptionInfo = 90,
CrstSaveModuleProfileData = 91,
- CrstSigConvert = 92,
- CrstSingleUseLock = 93,
- CrstSpecialStatics = 94,
- CrstStackSampler = 95,
- CrstStaticBoxInit = 96,
- CrstStressLog = 97,
- CrstStubCache = 98,
- CrstStubDispatchCache = 99,
- CrstStubUnwindInfoHeapSegments = 100,
- CrstSyncBlockCache = 101,
- CrstSyncHashLock = 102,
- CrstSystemBaseDomain = 103,
- CrstSystemDomain = 104,
- CrstSystemDomainDelayedUnloadList = 105,
- CrstThreadIdDispenser = 106,
- CrstThreadStore = 107,
- CrstTieredCompilation = 108,
- CrstTypeEquivalenceMap = 109,
- CrstTypeIDMap = 110,
- CrstUMEntryThunkCache = 111,
- CrstUMEntryThunkFreeListLock = 112,
- CrstUniqueStack = 113,
- CrstUnresolvedClassLock = 114,
- CrstUnwindInfoTableLock = 115,
- CrstVSDIndirectionCellLock = 116,
- CrstWrapperTemplate = 117,
- kNumberOfCrstTypes = 118
+ CrstSecurityStackwalkCache = 92,
+ CrstSigConvert = 93,
+ CrstSingleUseLock = 94,
+ CrstSpecialStatics = 95,
+ CrstStackSampler = 96,
+ CrstStaticBoxInit = 97,
+ CrstStressLog = 98,
+ CrstStubCache = 99,
+ CrstStubDispatchCache = 100,
+ CrstStubUnwindInfoHeapSegments = 101,
+ CrstSyncBlockCache = 102,
+ CrstSyncHashLock = 103,
+ CrstSystemBaseDomain = 104,
+ CrstSystemDomain = 105,
+ CrstSystemDomainDelayedUnloadList = 106,
+ CrstThreadIdDispenser = 107,
+ CrstThreadStore = 108,
+ CrstTieredCompilation = 109,
+ CrstTypeEquivalenceMap = 110,
+ CrstTypeIDMap = 111,
+ CrstUMEntryThunkCache = 112,
+ CrstUMEntryThunkFreeListLock = 113,
+ CrstUniqueStack = 114,
+ CrstUnresolvedClassLock = 115,
+ CrstUnwindInfoTableLock = 116,
+ CrstVSDIndirectionCellLock = 117,
+ CrstWrapperTemplate = 118,
+ kNumberOfCrstTypes = 119
};
#endif // __CRST_TYPES_INCLUDED
@@ -236,6 +237,7 @@ int g_rgCrstLevelMap[] =
4, // CrstRetThunkCache
3, // CrstSavedExceptionInfo
0, // CrstSaveModuleProfileData
+ 0, // CrstSecurityStackwalkCache
4, // CrstSigConvert
5, // CrstSingleUseLock
0, // CrstSpecialStatics
@@ -359,6 +361,7 @@ LPCSTR g_rgCrstNameMap[] =
"CrstRetThunkCache",
"CrstSavedExceptionInfo",
"CrstSaveModuleProfileData",
+ "CrstSecurityStackwalkCache",
"CrstSigConvert",
"CrstSingleUseLock",
"CrstSpecialStatics",
diff --git a/src/coreclr/inc/daccess.h b/src/coreclr/inc/daccess.h
index fcd5f5bbf1ff18..699947a02cdd42 100644
--- a/src/coreclr/inc/daccess.h
+++ b/src/coreclr/inc/daccess.h
@@ -614,7 +614,8 @@ struct DacTableHeader
// Define TADDR as a non-pointer value so use of it as a pointer
// will not work properly. Define it as unsigned so
// pointer comparisons aren't affected by sign.
-typedef uintptr_t TADDR;
+// This requires special casting to ULONG64 to sign-extend if necessary.
+typedef ULONG_PTR TADDR;
// TSIZE_T used for counts or ranges that need to span the size of a
// target pointer. For cross-plat, this may be different than SIZE_T
@@ -806,6 +807,7 @@ struct COR_ILMETHOD* DacGetIlMethod(TADDR methAddr);
struct _UNWIND_INFO * DacGetUnwindInfo(TADDR taUnwindInfo);
// virtually unwind a CONTEXT out-of-process
+struct _KNONVOLATILE_CONTEXT_POINTERS;
BOOL DacUnwindStackFrame(T_CONTEXT * pContext, T_KNONVOLATILE_CONTEXT_POINTERS* pContextPointers);
#endif // FEATURE_EH_FUNCLETS
@@ -2126,7 +2128,7 @@ inline void DACCOP_IGNORE(DacCopWarningCode code, const char * szReasonString)
// Declare TADDR as a non-pointer type so that arithmetic
// can be done on it directly, as with the DACCESS_COMPILE definition.
// This also helps expose pointer usage that may need to be changed.
-typedef uintptr_t TADDR;
+typedef ULONG_PTR TADDR;
typedef void* PTR_VOID;
typedef LPVOID* PTR_PTR_VOID;
@@ -2373,7 +2375,6 @@ typedef DPTR(int32_t) PTR_int32_t;
typedef DPTR(uint32_t) PTR_uint32_t;
typedef DPTR(uint64_t) PTR_uint64_t;
typedef DPTR(uintptr_t) PTR_uintptr_t;
-typedef DPTR(TADDR) PTR_TADDR;
#ifndef NATIVEAOT
typedef ArrayDPTR(BYTE) PTR_BYTE;
@@ -2395,6 +2396,7 @@ typedef DPTR(ULONG64) PTR_ULONG64;
typedef DPTR(INT64) PTR_INT64;
typedef DPTR(UINT64) PTR_UINT64;
typedef DPTR(SIZE_T) PTR_SIZE_T;
+typedef DPTR(TADDR) PTR_TADDR;
typedef DPTR(int) PTR_int;
typedef DPTR(BOOL) PTR_BOOL;
typedef DPTR(unsigned) PTR_unsigned;
@@ -2432,7 +2434,7 @@ typedef DPTR(IMAGE_TLS_DIRECTORY) PTR_IMAGE_TLS_DIRECTORY;
#endif
#ifndef NATIVEAOT
-#if defined(TARGET_X86) && defined(FEATURE_EH_FUNCLETS)
+#if defined(TARGET_X86) && defined(TARGET_UNIX)
typedef DPTR(struct _UNWIND_INFO) PTR_UNWIND_INFO;
#endif
diff --git a/src/coreclr/inc/dacvars.h b/src/coreclr/inc/dacvars.h
index 8f710c8fde1255..b632887e86d0f8 100644
--- a/src/coreclr/inc/dacvars.h
+++ b/src/coreclr/inc/dacvars.h
@@ -124,6 +124,8 @@ DEFINE_DACVAR(PTR_SString, SString__s_Empty, SString::s_Empty)
DEFINE_DACVAR(INT32, ArrayBase__s_arrayBoundsZero, ArrayBase::s_arrayBoundsZero)
+DEFINE_DACVAR(BOOL, StackwalkCache__s_Enabled, StackwalkCache::s_Enabled)
+
DEFINE_DACVAR(PTR_JITNotification, dac__g_pNotificationTable, ::g_pNotificationTable)
DEFINE_DACVAR(ULONG32, dac__g_dacNotificationFlags, ::g_dacNotificationFlags)
DEFINE_DACVAR(PTR_GcNotification, dac__g_pGcNotificationTable, ::g_pGcNotificationTable)
diff --git a/src/coreclr/inc/eetwain.h b/src/coreclr/inc/eetwain.h
index 71729a9182f3a1..9beca3f3729007 100644
--- a/src/coreclr/inc/eetwain.h
+++ b/src/coreclr/inc/eetwain.h
@@ -35,8 +35,8 @@
#define USE_GC_INFO_DECODER
#endif
-#ifdef TARGET_AMD64
-#define HAS_LIGHTUNWIND
+#if (defined(TARGET_X86) && !defined(TARGET_UNIX)) || defined(TARGET_AMD64)
+#define HAS_QUICKUNWIND
#endif
#define CHECK_APP_DOMAIN 0
@@ -103,8 +103,6 @@ enum ICodeManagerFlags
NoReportUntracked
= 0x0080, // EnumGCRefs/EnumerateLiveSlots should *not* include
// any untracked slots
-
- LightUnwind = 0x0100, // Unwind just enough to get return addresses
};
//*****************************************************************************
@@ -203,7 +201,8 @@ virtual ULONG32 GetStackParameterSize(EECodeInfo* pCodeInfo) = 0;
virtual bool UnwindStackFrame(PREGDISPLAY pContext,
EECodeInfo *pCodeInfo,
unsigned flags,
- CodeManState *pState) = 0;
+ CodeManState *pState,
+ StackwalkCacheUnwindInfo *pUnwindInfo) = 0;
/*
Is the function currently at a "GC safe point" ?
@@ -426,10 +425,11 @@ bool UnwindStackFrame(
PREGDISPLAY pContext,
EECodeInfo *pCodeInfo,
unsigned flags,
- CodeManState *pState);
+ CodeManState *pState,
+ StackwalkCacheUnwindInfo *pUnwindInfo);
-#ifdef HAS_LIGHTUNWIND
-enum LightUnwindFlag
+#ifdef HAS_QUICKUNWIND
+enum QuickUnwindFlag
{
UnwindCurrentStackFrame,
EnsureCallerStackFrameIsValid
@@ -441,11 +441,11 @@ enum LightUnwindFlag
*/
static
-void LightUnwindStackFrame(
+void QuickUnwindStackFrame(
PREGDISPLAY pRD,
- EECodeInfo *pCodeInfo,
- LightUnwindFlag flag);
-#endif // HAS_LIGHTUNWIND
+ StackwalkCacheEntry *pCacheEntry,
+ QuickUnwindFlag flag);
+#endif // HAS_QUICKUNWIND
/*
Is the function currently at a "GC safe point" ?
@@ -615,7 +615,7 @@ HRESULT FixContextForEnC(PCONTEXT pCtx,
#endif // #ifndef DACCESS_COMPILE
#ifdef FEATURE_EH_FUNCLETS
- static void EnsureCallerContextIsValid( PREGDISPLAY pRD, EECodeInfo * pCodeInfo = NULL, unsigned flags = 0);
+ static void EnsureCallerContextIsValid( PREGDISPLAY pRD, StackwalkCacheEntry* pCacheEntry, EECodeInfo * pCodeInfo = NULL );
static size_t GetCallerSp( PREGDISPLAY pRD );
#ifdef TARGET_X86
static size_t GetResumeSp( PCONTEXT pContext );
@@ -629,7 +629,124 @@ HRESULT FixContextForEnC(PCONTEXT pCtx,
};
#ifdef TARGET_X86
-#include "gc_unwind_x86.h"
+bool UnwindStackFrame(PREGDISPLAY pContext,
+ EECodeInfo *pCodeInfo,
+ unsigned flags,
+ CodeManState *pState,
+ StackwalkCacheUnwindInfo *pUnwindInfo);
+
+size_t DecodeGCHdrInfo(GCInfoToken gcInfoToken,
+ unsigned curOffset,
+ hdrInfo * infoPtr);
+#endif
+
+/*****************************************************************************
+ ToDo: Do we want to include JIT/IL/target.h?
+ */
+
+enum regNum
+{
+ REGI_EAX, REGI_ECX, REGI_EDX, REGI_EBX,
+ REGI_ESP, REGI_EBP, REGI_ESI, REGI_EDI,
+ REGI_COUNT,
+ REGI_NA = REGI_COUNT
+};
+
+/*****************************************************************************
+ Register masks
+ */
+
+enum RegMask
+{
+ RM_EAX = 0x01,
+ RM_ECX = 0x02,
+ RM_EDX = 0x04,
+ RM_EBX = 0x08,
+ RM_ESP = 0x10,
+ RM_EBP = 0x20,
+ RM_ESI = 0x40,
+ RM_EDI = 0x80,
+
+ RM_NONE = 0x00,
+ RM_ALL = (RM_EAX|RM_ECX|RM_EDX|RM_EBX|RM_ESP|RM_EBP|RM_ESI|RM_EDI),
+ RM_CALLEE_SAVED = (RM_EBP|RM_EBX|RM_ESI|RM_EDI),
+ RM_CALLEE_TRASHED = (RM_ALL & ~RM_CALLEE_SAVED),
+};
+
+/*****************************************************************************
+ *
+ * Helper to extract basic info from a method info block.
+ */
+
+struct hdrInfo
+{
+ unsigned int methodSize; // native code bytes
+ unsigned int argSize; // in bytes
+ unsigned int stackSize; // including callee saved registers
+ unsigned int rawStkSize; // excluding callee saved registers
+ ReturnKind returnKind; // The ReturnKind for this method.
+
+ unsigned int prologSize;
+
+ // Size of the epilogs in the method.
+ // For methods which use CEE_JMP, some epilogs may end with a "ret" instruction
+ // and some may end with a "jmp". The epilogSize reported should be for the
+ // epilog with the smallest size.
+ unsigned int epilogSize;
+
+ unsigned char epilogCnt;
+ bool epilogEnd; // is the epilog at the end of the method
+
+ bool ebpFrame; // locals and arguments addressed relative to EBP
+ bool doubleAlign; // is the stack double-aligned? locals addressed relative to ESP, and arguments relative to EBP
+ bool interruptible; // intr. at all times (excluding prolog/epilog), not just call sites
+
+ bool handlers; // has callable handlers
+ bool localloc; // uses localloc
+ bool editNcontinue; // has been compiled in EnC mode
+ bool varargs; // is this a varargs routine
+ bool profCallbacks; // does the method have Enter-Leave callbacks
+ bool genericsContext;// has a reported generic context parameter
+ bool genericsContextIsMethodDesc;// reported generic context parameter is methoddesc
+ bool isSpeculativeStackWalk; // is the stackwalk seeded by an untrusted source (e.g., sampling profiler)?
+
+ // These always includes EBP for EBP-frames and double-aligned-frames
+ RegMask savedRegMask:8; // which callee-saved regs are saved on stack
+
+ // Count of the callee-saved registers, excluding the frame pointer.
+ // This does not include EBP for EBP-frames and double-aligned-frames.
+ unsigned int savedRegsCountExclFP;
+
+ unsigned int untrackedCnt;
+ unsigned int varPtrTableSize;
+ unsigned int argTabOffset; // INVALID_ARGTAB_OFFSET if argtab must be reached by stepping through ptr tables
+ unsigned int gsCookieOffset; // INVALID_GS_COOKIE_OFFSET if there is no GuardStack cookie
+
+ unsigned int syncStartOffset; // start/end code offset of the protected region in synchronized methods.
+ unsigned int syncEndOffset; // INVALID_SYNC_OFFSET if there not synchronized method
+ unsigned int syncEpilogStart; // The start of the epilog. Synchronized methods are guaranteed to have no more than one epilog.
+ unsigned int revPInvokeOffset; // INVALID_REV_PINVOKE_OFFSET if there is no Reverse PInvoke frame
+
+ enum { NOT_IN_PROLOG = -1, NOT_IN_EPILOG = -1 };
+
+ int prologOffs; // NOT_IN_PROLOG if not in prolog
+ int epilogOffs; // NOT_IN_EPILOG if not in epilog. It is never 0
+
+ //
+ // Results passed back from scanArgRegTable
+ //
+ regNum thisPtrResult; // register holding "this"
+ RegMask regMaskResult; // registers currently holding GC ptrs
+ RegMask iregMaskResult; // iptr qualifier for regMaskResult
+ unsigned argHnumResult;
+ PTR_CBYTE argTabResult; // Table of encoded offsets of pending ptr args
+ unsigned argTabBytes; // Number of bytes in argTabResult[]
+
+ // These next two are now large structs (i.e 132 bytes each)
+
+ ptrArgTP argMaskResult; // pending arguments mask
+ ptrArgTP iargMaskResult; // iptr qualifier for argMaskResult
+};
/*****************************************************************************
How the stackwalkers buffer will be interpreted
@@ -640,9 +757,6 @@ struct CodeManStateBuf
DWORD hdrInfoSize;
hdrInfo hdrInfoBody;
};
-
-#endif
-
//*****************************************************************************
#endif // _EETWAIN_H
//*****************************************************************************
diff --git a/src/coreclr/inc/gc_unwind_x86.h b/src/coreclr/inc/gc_unwind_x86.h
deleted file mode 100644
index e5be6b2e4aa43f..00000000000000
--- a/src/coreclr/inc/gc_unwind_x86.h
+++ /dev/null
@@ -1,138 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-#ifndef _UNWIND_X86_H
-#define _UNWIND_X86_H
-
-// This file is shared between CoreCLR and NativeAOT. Some of the differences are handled
-// with the FEATURE_NATIVEAOT and FEATURE_EH_FUNCLETS defines. There are three main methods
-// that are used by both runtimes - DecodeGCHdrInfo, UnwindStackFrameX86, and EnumGcRefsX86.
-//
-// The IN_EH_FUNCLETS and IN_EH_FUNCLETS_COMMA macros are used to specify some parameters
-// for the above methods that are specific for a certain runtime or configuration.
-#ifdef FEATURE_EH_FUNCLETS
-#define IN_EH_FUNCLETS(a) a
-#define IN_EH_FUNCLETS_COMMA(a) a,
-#else
-#define IN_EH_FUNCLETS(a)
-#define IN_EH_FUNCLETS_COMMA(a)
-#endif
-
-enum regNum
-{
- REGI_EAX, REGI_ECX, REGI_EDX, REGI_EBX,
- REGI_ESP, REGI_EBP, REGI_ESI, REGI_EDI,
- REGI_COUNT,
- REGI_NA = REGI_COUNT
-};
-
-/*****************************************************************************
- Register masks
- */
-
-enum RegMask
-{
- RM_EAX = 0x01,
- RM_ECX = 0x02,
- RM_EDX = 0x04,
- RM_EBX = 0x08,
- RM_ESP = 0x10,
- RM_EBP = 0x20,
- RM_ESI = 0x40,
- RM_EDI = 0x80,
-
- RM_NONE = 0x00,
- RM_ALL = (RM_EAX|RM_ECX|RM_EDX|RM_EBX|RM_ESP|RM_EBP|RM_ESI|RM_EDI),
- RM_CALLEE_SAVED = (RM_EBP|RM_EBX|RM_ESI|RM_EDI),
- RM_CALLEE_TRASHED = (RM_ALL & ~RM_CALLEE_SAVED),
-};
-
-/*****************************************************************************
- *
- * Helper to extract basic info from a method info block.
- */
-
-struct hdrInfo
-{
- unsigned int methodSize; // native code bytes
- unsigned int argSize; // in bytes
- unsigned int stackSize; // including callee saved registers
- unsigned int rawStkSize; // excluding callee saved registers
- ReturnKind returnKind; // The ReturnKind for this method.
-
- unsigned int prologSize;
-
- // Size of the epilogs in the method.
- // For methods which use CEE_JMP, some epilogs may end with a "ret" instruction
- // and some may end with a "jmp". The epilogSize reported should be for the
- // epilog with the smallest size.
- unsigned int epilogSize;
-
- unsigned char epilogCnt;
- bool epilogEnd; // is the epilog at the end of the method
-
- bool ebpFrame; // locals and arguments addressed relative to EBP
- bool doubleAlign; // is the stack double-aligned? locals addressed relative to ESP, and arguments relative to EBP
- bool interruptible; // intr. at all times (excluding prolog/epilog), not just call sites
-
- bool handlers; // has callable handlers
- bool localloc; // uses localloc
- bool editNcontinue; // has been compiled in EnC mode
- bool varargs; // is this a varargs routine
- bool profCallbacks; // does the method have Enter-Leave callbacks
- bool genericsContext;// has a reported generic context parameter
- bool genericsContextIsMethodDesc;// reported generic context parameter is methoddesc
- bool isSpeculativeStackWalk; // is the stackwalk seeded by an untrusted source (e.g., sampling profiler)?
-
- // These always includes EBP for EBP-frames and double-aligned-frames
- RegMask savedRegMask:8; // which callee-saved regs are saved on stack
-
- // Count of the callee-saved registers, excluding the frame pointer.
- // This does not include EBP for EBP-frames and double-aligned-frames.
- unsigned int savedRegsCountExclFP;
-
- unsigned int untrackedCnt;
- unsigned int varPtrTableSize;
- unsigned int argTabOffset; // INVALID_ARGTAB_OFFSET if argtab must be reached by stepping through ptr tables
- unsigned int gsCookieOffset; // INVALID_GS_COOKIE_OFFSET if there is no GuardStack cookie
-
- unsigned int syncStartOffset; // start/end code offset of the protected region in synchronized methods.
- unsigned int syncEndOffset; // INVALID_SYNC_OFFSET if there not synchronized method
- unsigned int syncEpilogStart; // The start of the epilog. Synchronized methods are guaranteed to have no more than one epilog.
- unsigned int revPInvokeOffset; // INVALID_REV_PINVOKE_OFFSET if there is no Reverse PInvoke frame
-
- enum { NOT_IN_PROLOG = -1, NOT_IN_EPILOG = -1 };
-
- int prologOffs; // NOT_IN_PROLOG if not in prolog
- int epilogOffs; // NOT_IN_EPILOG if not in epilog. It is never 0
-
- //
- // Results passed back from scanArgRegTable
- //
- regNum thisPtrResult; // register holding "this"
- RegMask regMaskResult; // registers currently holding GC ptrs
- RegMask iregMaskResult; // iptr qualifier for regMaskResult
- unsigned argHnumResult;
- PTR_CBYTE argTabResult; // Table of encoded offsets of pending ptr args
- unsigned argTabBytes; // Number of bytes in argTabResult[]
-
- // These next two are now large structs (i.e 132 bytes each)
-
- ptrArgTP argMaskResult; // pending arguments mask
- ptrArgTP iargMaskResult; // iptr qualifier for argMaskResult
-};
-
-bool UnwindStackFrameX86(PREGDISPLAY pContext,
- PTR_CBYTE methodStart,
- DWORD curOffs,
- hdrInfo * info,
- PTR_CBYTE table,
- IN_EH_FUNCLETS_COMMA(PTR_CBYTE funcletStart)
- IN_EH_FUNCLETS_COMMA(bool isFunclet)
- bool updateAllRegs);
-
-size_t DecodeGCHdrInfo(GCInfoToken gcInfoToken,
- unsigned curOffset,
- hdrInfo * infoPtr);
-
-#endif // _UNWIND_X86_H
diff --git a/src/coreclr/inc/gcinfo.h b/src/coreclr/inc/gcinfo.h
index f334b099f2578e..66933b10f0445a 100644
--- a/src/coreclr/inc/gcinfo.h
+++ b/src/coreclr/inc/gcinfo.h
@@ -13,6 +13,9 @@
/*****************************************************************************/
#include "daccess.h"
+#include "windef.h" // For BYTE
+
+// Some declarations in this file are used on non-x86 platforms, but most are x86-specific.
// Use the lower 2 bits of the offsets stored in the tables
// to encode properties
@@ -20,15 +23,14 @@
const unsigned OFFSET_MASK = 0x3; // mask to access the low 2 bits
//
+// Note for untracked locals the flags allowed are "pinned" and "byref"
+// and for tracked locals the flags allowed are "this" and "byref"
// Note that these definitions should also match the definitions of
// GC_CALL_INTERIOR and GC_CALL_PINNED in VM/gc.h
//
const unsigned byref_OFFSET_FLAG = 0x1; // the offset is an interior ptr
const unsigned pinned_OFFSET_FLAG = 0x2; // the offset is a pinned ptr
-#if defined(TARGET_X86) && !defined(FEATURE_EH_FUNCLETS)
-// JIT32_ENCODER has additional restriction on x86 without funclets:
-// - for untracked locals the flags allowed are "pinned" and "byref"
-// - for tracked locals the flags allowed are "this" and "byref"
+#if !defined(TARGET_X86) || !defined(FEATURE_EH_FUNCLETS)
const unsigned this_OFFSET_FLAG = 0x2; // the offset is "this"
#endif
@@ -55,17 +57,9 @@ const unsigned this_OFFSET_FLAG = 0x2; // the offset is "this"
struct GCInfoToken
{
PTR_VOID Info;
- uint32_t Version;
-
-#ifdef FEATURE_NATIVEAOT
- GCInfoToken(PTR_VOID info)
- {
- Info = info;
- Version = GCINFO_VERSION;
- }
-#endif
+ UINT32 Version;
- static uint32_t ReadyToRunVersionToGcInfoVersion(uint32_t readyToRunMajorVersion)
+ static UINT32 ReadyToRunVersionToGcInfoVersion(UINT32 readyToRunMajorVersion)
{
// GcInfo version is current from ReadyToRun version 2.0
return GCINFO_VERSION;
diff --git a/src/coreclr/inc/gcinfodecoder.h b/src/coreclr/inc/gcinfodecoder.h
index eb60728af5b1f7..34af8c53055687 100644
--- a/src/coreclr/inc/gcinfodecoder.h
+++ b/src/coreclr/inc/gcinfodecoder.h
@@ -31,17 +31,7 @@
#ifdef FEATURE_NATIVEAOT
-#include "gcinfo.h"
-
typedef ArrayDPTR(const uint8_t) PTR_CBYTE;
-#ifdef TARGET_X86
-// Bridge few additional pointer types used in x86 unwinding code
-typedef DPTR(DWORD) PTR_DWORD;
-typedef DPTR(WORD) PTR_WORD;
-typedef DPTR(BYTE) PTR_BYTE;
-typedef DPTR(signed char) PTR_SBYTE;
-typedef DPTR(INT32) PTR_INT32;
-#endif
#define LIMITED_METHOD_CONTRACT
#define SUPPORTS_DAC
@@ -60,12 +50,22 @@ typedef DPTR(INT32) PTR_INT32;
#define SSIZE_T intptr_t
#define LPVOID void*
-#define CHECK_APP_DOMAIN 0
-
typedef void * OBJECTREF;
#define GET_CALLER_SP(pREGDISPLAY) ((TADDR)0)
+struct GCInfoToken
+{
+ PTR_VOID Info;
+ UINT32 Version;
+
+ GCInfoToken(PTR_VOID info)
+ {
+ Info = info;
+ Version = 2;
+ }
+};
+
#else // FEATURE_NATIVEAOT
// Stuff from cgencpu.h:
@@ -179,7 +179,6 @@ enum ICodeManagerFlags
ExecutionAborted = 0x0002, // execution of this function has been aborted
// (i.e. it will not continue execution at the
// current location)
- AbortingCall = 0x0004, // The current call will never return
ParentOfFuncletStackFrame
= 0x0040, // A funclet for this frame was previously reported
diff --git a/src/coreclr/inc/icorjitinfoimpl_generated.h b/src/coreclr/inc/icorjitinfoimpl_generated.h
index 2348162d948545..8dd993f5b47829 100644
--- a/src/coreclr/inc/icorjitinfoimpl_generated.h
+++ b/src/coreclr/inc/icorjitinfoimpl_generated.h
@@ -104,7 +104,6 @@ CORINFO_CLASS_HANDLE getDefaultEqualityComparerClass(
void expandRawHandleIntrinsic(
CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_GENERICHANDLE_RESULT* pResult) override;
bool isIntrinsicType(
@@ -298,14 +297,12 @@ bool getReadyToRunHelper(
CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_LOOKUP_KIND* pGenericLookupKind,
CorInfoHelpFunc id,
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_CONST_LOOKUP* pLookup) override;
void getReadyToRunDelegateCtorHelper(
CORINFO_RESOLVED_TOKEN* pTargetMethod,
mdToken targetConstraint,
CORINFO_CLASS_HANDLE delegateType,
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_LOOKUP* pLookup) override;
CorInfoInitClassResult initClass(
@@ -441,11 +438,6 @@ void reportRichMappings(
ICorDebugInfo::RichOffsetMapping* mappings,
uint32_t numMappings) override;
-void reportMetadata(
- const char* key,
- const void* value,
- size_t length) override;
-
void* allocateArray(
size_t cBytes) override;
@@ -507,10 +499,6 @@ bool getSystemVAmd64PassStructInRegisterDescriptor(
CORINFO_CLASS_HANDLE structHnd,
SYSTEMV_AMD64_CORINFO_STRUCT_REG_PASSING_DESCRIPTOR* structPassInRegDescPtr) override;
-void getSwiftLowering(
- CORINFO_CLASS_HANDLE structHnd,
- CORINFO_SWIFT_LOWERING* pLowering) override;
-
uint32_t getLoongArch64PassStructInRegisterFlags(
CORINFO_CLASS_HANDLE structHnd) override;
@@ -563,7 +551,6 @@ CORINFO_FIELD_HANDLE embedFieldHandle(
void embedGenericHandle(
CORINFO_RESOLVED_TOKEN* pResolvedToken,
bool fEmbedParent,
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_GENERICHANDLE_RESULT* pResult) override;
void getLocationOfThisType(
diff --git a/src/coreclr/inc/jiteeversionguid.h b/src/coreclr/inc/jiteeversionguid.h
index 41e58ca24537b8..6355fc20dd0fd5 100644
--- a/src/coreclr/inc/jiteeversionguid.h
+++ b/src/coreclr/inc/jiteeversionguid.h
@@ -43,11 +43,11 @@ typedef const GUID *LPCGUID;
#define GUID_DEFINED
#endif // !GUID_DEFINED
-constexpr GUID JITEEVersionIdentifier = { /* 6fd660c7-96be-4832-a84c-4200141f7d08 */
- 0x6fd660c7,
- 0x96be,
- 0x4832,
- {0xa8, 0x4c, 0x42, 0x00, 0x14, 0x1f, 0x7d, 0x08}
+constexpr GUID JITEEVersionIdentifier = { /* 0fb71692-0ee6-4914-88a8-6446e45f23e8 */
+ 0x0fb71692,
+ 0x0ee6,
+ 0x4914,
+ {0x88, 0xa8, 0x64, 0x46, 0xe4, 0x5f, 0x23, 0xe8}
};
//////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/coreclr/inc/jithelpers.h b/src/coreclr/inc/jithelpers.h
index a0982f3ac6520f..65167abd6a4dd6 100644
--- a/src/coreclr/inc/jithelpers.h
+++ b/src/coreclr/inc/jithelpers.h
@@ -235,10 +235,13 @@
DYNAMICJITHELPER(CORINFO_HELP_INIT_PINVOKE_FRAME, NULL, CORINFO_HELP_SIG_REG_ONLY)
#endif
- DYNAMICJITHELPER(CORINFO_HELP_MEMSET, NULL, CORINFO_HELP_SIG_REG_ONLY)
- DYNAMICJITHELPER(CORINFO_HELP_MEMZERO, NULL, CORINFO_HELP_SIG_REG_ONLY)
- DYNAMICJITHELPER(CORINFO_HELP_MEMCPY, NULL, CORINFO_HELP_SIG_REG_ONLY)
- JITHELPER(CORINFO_HELP_NATIVE_MEMSET, Jit_NativeMemSet, CORINFO_HELP_SIG_REG_ONLY)
+#ifdef TARGET_X86
+ JITHELPER(CORINFO_HELP_MEMSET, NULL, CORINFO_HELP_SIG_CANNOT_USE_ALIGN_STUB)
+ JITHELPER(CORINFO_HELP_MEMCPY, NULL, CORINFO_HELP_SIG_CANNOT_USE_ALIGN_STUB)
+#else
+ JITHELPER(CORINFO_HELP_MEMSET, JIT_MemSet, CORINFO_HELP_SIG_REG_ONLY)
+ JITHELPER(CORINFO_HELP_MEMCPY, JIT_MemCpy, CORINFO_HELP_SIG_REG_ONLY)
+#endif
// Generics
JITHELPER(CORINFO_HELP_RUNTIMEHANDLE_METHOD, JIT_GenericHandleMethod, CORINFO_HELP_SIG_REG_ONLY)
diff --git a/src/coreclr/inc/readytorun.h b/src/coreclr/inc/readytorun.h
index 41a4aa251fa742..b3128cb00e4b73 100644
--- a/src/coreclr/inc/readytorun.h
+++ b/src/coreclr/inc/readytorun.h
@@ -20,7 +20,7 @@
// If you update this, ensure you run `git grep MINIMUM_READYTORUN_MAJOR_VERSION`
// and handle pending work.
#define READYTORUN_MAJOR_VERSION 0x0009
-#define READYTORUN_MINOR_VERSION 0x0002
+#define READYTORUN_MINOR_VERSION 0x0001
#define MINIMUM_READYTORUN_MAJOR_VERSION 0x009
@@ -33,8 +33,6 @@
// R2R Version 8.0 Changes the alignment of the Int128 type
// R2R Version 9.0 adds support for the Vector512 type
// R2R Version 9.1 adds new helpers to allocate objects on frozen segments
-// R2R Version 9.2 adds MemZero and NativeMemSet helpers
-
struct READYTORUN_CORE_HEADER
{
@@ -327,9 +325,7 @@ enum ReadyToRunHelper
READYTORUN_HELPER_Stelem_Ref = 0x38,
READYTORUN_HELPER_Ldelema_Ref = 0x39,
- READYTORUN_HELPER_MemZero = 0x3E,
- READYTORUN_HELPER_MemSet = 0x3F,
- READYTORUN_HELPER_NativeMemSet = 0x40,
+ READYTORUN_HELPER_MemSet = 0x40,
READYTORUN_HELPER_MemCpy = 0x41,
// PInvoke helpers
@@ -445,6 +441,10 @@ enum ReadyToRunHelper
READYTORUN_HELPER_StackProbe = 0x111,
READYTORUN_HELPER_GetCurrentManagedThreadId = 0x112,
+
+ // Array helpers for use with native ints
+ READYTORUN_HELPER_Stelem_Ref_I = 0x113,
+ READYTORUN_HELPER_Ldelema_Ref_I = 0x114,
};
#include "readytoruninstructionset.h"
diff --git a/src/coreclr/inc/readytorunhelpers.h b/src/coreclr/inc/readytorunhelpers.h
index bbb586e8eb4a30..8691f9b9cb8c0c 100644
--- a/src/coreclr/inc/readytorunhelpers.h
+++ b/src/coreclr/inc/readytorunhelpers.h
@@ -29,8 +29,6 @@ HELPER(READYTORUN_HELPER_Stelem_Ref, CORINFO_HELP_ARRADDR_ST,
HELPER(READYTORUN_HELPER_Ldelema_Ref, CORINFO_HELP_LDELEMA_REF, )
HELPER(READYTORUN_HELPER_MemSet, CORINFO_HELP_MEMSET, )
-HELPER(READYTORUN_HELPER_MemZero, CORINFO_HELP_MEMZERO, )
-HELPER(READYTORUN_HELPER_NativeMemSet, CORINFO_HELP_NATIVE_MEMSET, )
HELPER(READYTORUN_HELPER_MemCpy, CORINFO_HELP_MEMCPY, )
HELPER(READYTORUN_HELPER_LogMethodEnter, CORINFO_HELP_BBT_FCN_ENTER, )
diff --git a/src/coreclr/inc/regdisp.h b/src/coreclr/inc/regdisp.h
index ec47b9019dbc02..4832791ebfa5dc 100644
--- a/src/coreclr/inc/regdisp.h
+++ b/src/coreclr/inc/regdisp.h
@@ -131,12 +131,6 @@ inline LPVOID GetRegdisplayFPAddress(REGDISPLAY *display) {
return (LPVOID)display->GetEbpLocation();
}
-inline void SetRegdisplayPCTAddr(REGDISPLAY *display, TADDR addr)
-{
- display->PCTAddr = addr;
- display->ControlPC = *PTR_PCODE(addr);
-}
-
// This function tells us if the given stack pointer is in one of the frames of the functions called by the given frame
inline BOOL IsInCalleesFrames(REGDISPLAY *display, LPVOID stackPointer) {
@@ -324,7 +318,7 @@ struct REGDISPLAY : public REGDISPLAY_BASE {
memset(this, 0, sizeof(REGDISPLAY));
// Setup the pointer to ControlPC field
- pPC = (DWORD *)&ControlPC;
+ pPC = &ControlPC;
}
};
@@ -453,7 +447,7 @@ inline void FillContextPointers(PT_KNONVOLATILE_CONTEXT_POINTERS pCtxPtrs, PT_CO
}
#endif // FEATURE_EH_FUNCLETS
-inline void FillRegDisplay(const PREGDISPLAY pRD, PT_CONTEXT pctx, PT_CONTEXT pCallerCtx = NULL, bool fLightUnwind = false)
+inline void FillRegDisplay(const PREGDISPLAY pRD, PT_CONTEXT pctx, PT_CONTEXT pCallerCtx = NULL)
{
WRAPPER_NO_CONTRACT;
@@ -503,16 +497,6 @@ inline void FillRegDisplay(const PREGDISPLAY pRD, PT_CONTEXT pctx, PT_CONTEXT pC
pRD->IsCallerSPValid = TRUE; // Don't add usage of this field. This is only temporary.
}
-#ifdef DEBUG_REGDISPLAY
- pRD->_pThread = NULL;
-#endif // DEBUG_REGDISPLAY
-
- // This will setup the PC and SP
- SyncRegDisplayToCurrentContext(pRD);
-
- if (fLightUnwind)
- return;
-
FillContextPointers(&pRD->ctxPtrsOne, pctx);
#if defined(TARGET_ARM)
@@ -566,6 +550,12 @@ inline void FillRegDisplay(const PREGDISPLAY pRD, PT_CONTEXT pctx, PT_CONTEXT pC
pRD->volatileCurrContextPointers.T6 = &pctx->T6;
#endif // TARGET_RISCV64
+#ifdef DEBUG_REGDISPLAY
+ pRD->_pThread = NULL;
+#endif // DEBUG_REGDISPLAY
+
+ // This will setup the PC and SP
+ SyncRegDisplayToCurrentContext(pRD);
#endif // !FEATURE_EH_FUNCLETS
}
diff --git a/src/coreclr/jit/CMakeLists.txt b/src/coreclr/jit/CMakeLists.txt
index 5ba50306d1b72a..aa660321075890 100644
--- a/src/coreclr/jit/CMakeLists.txt
+++ b/src/coreclr/jit/CMakeLists.txt
@@ -23,8 +23,6 @@ function(create_standalone_jit)
if(TARGETDETAILS_OS STREQUAL "unix_osx" OR TARGETDETAILS_OS STREQUAL "unix_anyos")
set(JIT_ARCH_LINK_LIBRARIES gcinfo_unix_${TARGETDETAILS_ARCH})
- elseif(TARGETDETAILS_OS STREQUAL "win_aot")
- set(JIT_ARCH_LINK_LIBRARIES gcinfo_win_${TARGETDETAILS_ARCH})
else()
set(JIT_ARCH_LINK_LIBRARIES gcinfo_${TARGETDETAILS_OS}_${TARGETDETAILS_ARCH})
endif()
@@ -96,6 +94,7 @@ set( JIT_SOURCES
bitset.cpp
block.cpp
buildstring.cpp
+ layout.cpp
codegencommon.cpp
codegenlinear.cpp
compiler.cpp
@@ -124,23 +123,20 @@ set( JIT_SOURCES
gentree.cpp
gschecks.cpp
hashbv.cpp
- helperexpansion.cpp
- hostallocator.cpp
hwintrinsic.cpp
+ hostallocator.cpp
ifconversion.cpp
- importer.cpp
+ helperexpansion.cpp
+ indirectcalltransformer.cpp
importercalls.cpp
+ importer.cpp
importervectorization.cpp
- indirectcalltransformer.cpp
- inductionvariableopts.cpp
inline.cpp
inlinepolicy.cpp
instr.cpp
jitconfig.cpp
jiteh.cpp
jithashtable.cpp
- jitmetadata.cpp
- layout.cpp
lclmorph.cpp
lclvars.cpp
likelyclass.cpp
@@ -155,6 +151,7 @@ set( JIT_SOURCES
objectalloc.cpp
optcse.cpp
optimizebools.cpp
+ switchrecognition.cpp
optimizer.cpp
patchpoint.cpp
phase.cpp
@@ -167,7 +164,6 @@ set( JIT_SOURCES
regalloc.cpp
registerargconvention.cpp
regset.cpp
- scev.cpp
scopeinfo.cpp
sideeffects.cpp
sm.cpp
@@ -176,7 +172,6 @@ set( JIT_SOURCES
ssabuilder.cpp
ssarenamestate.cpp
stacklevelsetter.cpp
- switchrecognition.cpp
treelifeupdater.cpp
unwind.cpp
utils.cpp
@@ -339,8 +334,6 @@ set( JIT_HEADERS
jitexpandarray.h
jitgcinfo.h
jithashtable.h
- jitmetadata.h
- jitmetadatalist.h
jitpch.h
jitstd.h
lir.h
@@ -363,7 +356,6 @@ set( JIT_HEADERS
registerargconvention.h
register.h
regset.h
- scev.h
sideeffects.h
simd.h
simdashwintrinsic.h
@@ -654,7 +646,6 @@ else()
create_standalone_jit(TARGET clrjit_universal_arm_${ARCH_HOST_NAME} OS universal ARCH arm DESTINATIONS .)
target_compile_definitions(clrjit_universal_arm_${ARCH_HOST_NAME} PRIVATE ARM_SOFTFP CONFIGURABLE_ARM_ABI)
create_standalone_jit(TARGET clrjit_win_x86_${ARCH_HOST_NAME} OS win ARCH x86 DESTINATIONS .)
- create_standalone_jit(TARGET clrjit_win_aot_x86_${ARCH_HOST_NAME} OS win_aot ARCH x86 DESTINATIONS .)
endif (CLR_CMAKE_TARGET_ARCH_RISCV64)
if (CLR_CMAKE_TARGET_ARCH_I386 AND CLR_CMAKE_TARGET_UNIX)
diff --git a/src/coreclr/jit/ICorJitInfo_names_generated.h b/src/coreclr/jit/ICorJitInfo_names_generated.h
index 30c499518e007c..5fe1f716d474b8 100644
--- a/src/coreclr/jit/ICorJitInfo_names_generated.h
+++ b/src/coreclr/jit/ICorJitInfo_names_generated.h
@@ -108,7 +108,6 @@ DEF_CLR_API(setBoundaries)
DEF_CLR_API(getVars)
DEF_CLR_API(setVars)
DEF_CLR_API(reportRichMappings)
-DEF_CLR_API(reportMetadata)
DEF_CLR_API(allocateArray)
DEF_CLR_API(freeArray)
DEF_CLR_API(getArgNext)
@@ -125,7 +124,6 @@ DEF_CLR_API(printMethodName)
DEF_CLR_API(getMethodNameFromMetadata)
DEF_CLR_API(getMethodHash)
DEF_CLR_API(getSystemVAmd64PassStructInRegisterDescriptor)
-DEF_CLR_API(getSwiftLowering)
DEF_CLR_API(getLoongArch64PassStructInRegisterFlags)
DEF_CLR_API(getRISCV64PassStructInRegisterFlags)
DEF_CLR_API(getThreadTLSIndex)
diff --git a/src/coreclr/jit/ICorJitInfo_wrapper_generated.hpp b/src/coreclr/jit/ICorJitInfo_wrapper_generated.hpp
index 77af720739ecee..cae9b5d7b39e59 100644
--- a/src/coreclr/jit/ICorJitInfo_wrapper_generated.hpp
+++ b/src/coreclr/jit/ICorJitInfo_wrapper_generated.hpp
@@ -202,11 +202,10 @@ CORINFO_CLASS_HANDLE WrapICorJitInfo::getDefaultEqualityComparerClass(
void WrapICorJitInfo::expandRawHandleIntrinsic(
CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_GENERICHANDLE_RESULT* pResult)
{
API_ENTER(expandRawHandleIntrinsic);
- wrapHnd->expandRawHandleIntrinsic(pResolvedToken, callerHandle, pResult);
+ wrapHnd->expandRawHandleIntrinsic(pResolvedToken, pResult);
API_LEAVE(expandRawHandleIntrinsic);
}
@@ -688,11 +687,10 @@ bool WrapICorJitInfo::getReadyToRunHelper(
CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_LOOKUP_KIND* pGenericLookupKind,
CorInfoHelpFunc id,
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_CONST_LOOKUP* pLookup)
{
API_ENTER(getReadyToRunHelper);
- bool temp = wrapHnd->getReadyToRunHelper(pResolvedToken, pGenericLookupKind, id, callerHandle, pLookup);
+ bool temp = wrapHnd->getReadyToRunHelper(pResolvedToken, pGenericLookupKind, id, pLookup);
API_LEAVE(getReadyToRunHelper);
return temp;
}
@@ -701,11 +699,10 @@ void WrapICorJitInfo::getReadyToRunDelegateCtorHelper(
CORINFO_RESOLVED_TOKEN* pTargetMethod,
mdToken targetConstraint,
CORINFO_CLASS_HANDLE delegateType,
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_LOOKUP* pLookup)
{
API_ENTER(getReadyToRunDelegateCtorHelper);
- wrapHnd->getReadyToRunDelegateCtorHelper(pTargetMethod, targetConstraint, delegateType, callerHandle, pLookup);
+ wrapHnd->getReadyToRunDelegateCtorHelper(pTargetMethod, targetConstraint, delegateType, pLookup);
API_LEAVE(getReadyToRunDelegateCtorHelper);
}
@@ -1031,16 +1028,6 @@ void WrapICorJitInfo::reportRichMappings(
API_LEAVE(reportRichMappings);
}
-void WrapICorJitInfo::reportMetadata(
- const char* key,
- const void* value,
- size_t length)
-{
- API_ENTER(reportMetadata);
- wrapHnd->reportMetadata(key, value, length);
- API_LEAVE(reportMetadata);
-}
-
void* WrapICorJitInfo::allocateArray(
size_t cBytes)
{
@@ -1196,15 +1183,6 @@ bool WrapICorJitInfo::getSystemVAmd64PassStructInRegisterDescriptor(
return temp;
}
-void WrapICorJitInfo::getSwiftLowering(
- CORINFO_CLASS_HANDLE structHnd,
- CORINFO_SWIFT_LOWERING* pLowering)
-{
- API_ENTER(getSwiftLowering);
- wrapHnd->getSwiftLowering(structHnd, pLowering);
- API_LEAVE(getSwiftLowering);
-}
-
uint32_t WrapICorJitInfo::getLoongArch64PassStructInRegisterFlags(
CORINFO_CLASS_HANDLE structHnd)
{
@@ -1333,11 +1311,10 @@ CORINFO_FIELD_HANDLE WrapICorJitInfo::embedFieldHandle(
void WrapICorJitInfo::embedGenericHandle(
CORINFO_RESOLVED_TOKEN* pResolvedToken,
bool fEmbedParent,
- CORINFO_METHOD_HANDLE callerHandle,
CORINFO_GENERICHANDLE_RESULT* pResult)
{
API_ENTER(embedGenericHandle);
- wrapHnd->embedGenericHandle(pResolvedToken, fEmbedParent, callerHandle, pResult);
+ wrapHnd->embedGenericHandle(pResolvedToken, fEmbedParent, pResult);
API_LEAVE(embedGenericHandle);
}
diff --git a/src/coreclr/jit/assertionprop.cpp b/src/coreclr/jit/assertionprop.cpp
index 68a01a1ab7fcd1..fb317faea42c13 100644
--- a/src/coreclr/jit/assertionprop.cpp
+++ b/src/coreclr/jit/assertionprop.cpp
@@ -857,27 +857,12 @@ void Compiler::optPrintAssertion(AssertionDsc* curAssertion, AssertionIndex asse
if (curAssertion->op1.kind == O1K_EXACT_TYPE)
{
ssize_t iconVal = curAssertion->op2.u1.iconVal;
- if (IsTargetAbi(CORINFO_NATIVEAOT_ABI) || opts.IsReadyToRun())
- {
- printf("Exact Type MT(0x%p)", dspPtr(iconVal));
- }
- else
- {
- printf("Exact Type MT(0x%p %s)", dspPtr(iconVal),
- eeGetClassName((CORINFO_CLASS_HANDLE)iconVal));
- }
+ printf("Exact Type MT(0x%p %s)", dspPtr(iconVal), eeGetClassName((CORINFO_CLASS_HANDLE)iconVal));
}
else if (curAssertion->op1.kind == O1K_SUBTYPE)
{
ssize_t iconVal = curAssertion->op2.u1.iconVal;
- if (IsTargetAbi(CORINFO_NATIVEAOT_ABI) || opts.IsReadyToRun())
- {
- printf("MT(0x%p)", dspPtr(iconVal));
- }
- else
- {
- printf("MT(0x%p %s)", dspPtr(iconVal), eeGetClassName((CORINFO_CLASS_HANDLE)iconVal));
- }
+ printf("MT(0x%p %s)", dspPtr(iconVal), eeGetClassName((CORINFO_CLASS_HANDLE)iconVal));
assert(curAssertion->op2.HasIconFlag());
}
else if ((curAssertion->op1.kind == O1K_BOUND_OPER_BND) ||
@@ -2637,20 +2622,28 @@ AssertionIndex Compiler::optAssertionIsSubtype(GenTree* tree, GenTree* methodTab
{
AssertionIndex const index = GetAssertionIndex(bvIndex);
AssertionDsc* curAssertion = optGetAssertion(index);
- if ((curAssertion->assertionKind != OAK_EQUAL) ||
- ((curAssertion->op1.kind != O1K_SUBTYPE) && (curAssertion->op1.kind != O1K_EXACT_TYPE)))
- {
- // TODO-CQ: We might benefit from OAK_NOT_EQUAL assertion as well, e.g.:
- // if (obj is not MyClass) // obj is known to be never of MyClass class
- // {
- // if (obj is MyClass) // can be folded to false
- // {
- //
+ if (curAssertion->assertionKind != OAK_EQUAL ||
+ (curAssertion->op1.kind != O1K_SUBTYPE && curAssertion->op1.kind != O1K_EXACT_TYPE))
+ {
continue;
}
- if ((curAssertion->op1.vn != vnStore->VNConservativeNormalValue(tree->gtVNPair) ||
- (curAssertion->op2.kind != O2K_CONST_INT)))
+ // If local assertion prop use "lcl" based comparison, if global assertion prop use vn based comparison.
+ if ((optLocalAssertionProp) ? (curAssertion->op1.lcl.lclNum != tree->AsLclVarCommon()->GetLclNum())
+ : (curAssertion->op1.vn != vnStore->VNConservativeNormalValue(tree->gtVNPair)))
+ {
+ continue;
+ }
+
+ if (curAssertion->op2.kind == O2K_IND_CNS_INT)
+ {
+ if (methodTableArg->gtOper != GT_IND)
+ {
+ continue;
+ }
+ methodTableArg = methodTableArg->AsOp()->gtOp1;
+ }
+ else if (curAssertion->op2.kind != O2K_CONST_INT)
{
continue;
}
@@ -2664,8 +2657,6 @@ AssertionIndex Compiler::optAssertionIsSubtype(GenTree* tree, GenTree* methodTab
if (curAssertion->op2.u1.iconVal == methodTableVal)
{
- // TODO-CQ: if they don't match, we might still be able to prove that the result is foldable via
- // compareTypesForCast.
return index;
}
}
@@ -2687,6 +2678,10 @@ GenTree* Compiler::optVNBasedFoldExpr_Call(BasicBlock* block, GenTree* parent, G
{
switch (call->GetHelperNum())
{
+ //
+ // Fold "CAST(IsInstanceOf(obj, cls), cls)" to "IsInstanceOf(obj, cls)"
+ // where CAST is either ISINST or CASTCLASS.
+ //
case CORINFO_HELP_CHKCASTARRAY:
case CORINFO_HELP_CHKCASTANY:
case CORINFO_HELP_CHKCASTINTERFACE:
@@ -2696,8 +2691,10 @@ GenTree* Compiler::optVNBasedFoldExpr_Call(BasicBlock* block, GenTree* parent, G
case CORINFO_HELP_ISINSTANCEOFANY:
case CORINFO_HELP_ISINSTANCEOFINTERFACE:
{
- GenTree* castClsArg = call->gtArgs.GetUserArgByIndex(0)->GetNode();
- GenTree* castObjArg = call->gtArgs.GetUserArgByIndex(1)->GetNode();
+ GenTree* castClsArg = call->gtArgs.GetUserArgByIndex(0)->GetNode();
+ GenTree* castObjArg = call->gtArgs.GetUserArgByIndex(1)->GetNode();
+ ValueNum castClsArgVN = castClsArg->gtVNPair.GetConservative();
+ ValueNum castObjArgVN = castObjArg->gtVNPair.GetConservative();
if ((castObjArg->gtFlags & GTF_ALL_EFFECT) != 0)
{
@@ -2707,26 +2704,17 @@ GenTree* Compiler::optVNBasedFoldExpr_Call(BasicBlock* block, GenTree* parent, G
return nullptr;
}
- // If object has the same VN as the cast, then the cast is effectively a no-op.
- //
- if (castObjArg->gtVNPair == call->gtVNPair)
- {
- return gtWrapWithSideEffects(castObjArg, call, GTF_ALL_EFFECT, true);
- }
-
- // Let's see if gtGetClassHandle may help us to fold the cast (since VNForCast did not).
- if (castClsArg->IsIconHandle(GTF_ICON_CLASS_HDL))
+ VNFuncApp funcApp;
+ if (vnStore->GetVNFunc(castObjArgVN, &funcApp) && (funcApp.m_func == VNF_IsInstanceOf))
{
- bool isExact;
- bool isNonNull;
- CORINFO_CLASS_HANDLE castFrom = gtGetClassHandle(castObjArg, &isExact, &isNonNull);
- if (castFrom != NO_CLASS_HANDLE)
+ ValueNum innerCastClsVN = funcApp.m_args[0];
+ if (innerCastClsVN == castClsArgVN)
{
- CORINFO_CLASS_HANDLE castTo = gtGetHelperArgClassHandle(castClsArg);
- if (info.compCompHnd->compareTypesForCast(castFrom, castTo) == TypeCompareState::Must)
- {
- return gtWrapWithSideEffects(castObjArg, call, GTF_ALL_EFFECT, true);
- }
+ // The outer cast is redundant, remove it and preserve its side effects
+ // We do ignoreRoot here because the actual cast node never throws any exceptions.
+ GenTree* result = gtWrapWithSideEffects(castObjArg, call, GTF_ALL_EFFECT, true);
+ fgSetTreeSeq(result);
+ return result;
}
}
}
@@ -3042,18 +3030,6 @@ GenTree* Compiler::optVNBasedFoldConstExpr(BasicBlock* block, GenTree* parent, G
break;
}
break;
-
- case TYP_MASK:
- {
- simdmask_t value = vnStore->ConstantValue(vnCns);
-
- GenTreeVecCon* vecCon = gtNewVconNode(tree->TypeGet());
- memcpy(&vecCon->gtSimdVal, &value, sizeof(simdmask_t));
-
- conValTree = vecCon;
- break;
- }
- break;
#endif // TARGET_XARCH
#endif // FEATURE_SIMD
@@ -4222,20 +4198,20 @@ AssertionIndex Compiler::optGlobalAssertionIsEqualOrNotEqual(ASSERT_VALARG_TP as
return assertionIndex;
}
- // Look for matching exact type assertions based on vtable accesses. E.g.:
- //
- // op1: VNF_InvariantLoad(myObj) or in other words: a vtable access
- // op2: 'MyType' class handle
- // Assertion: 'myObj's type is exactly MyType
- //
+ // Look for matching exact type assertions based on vtable accesses
if ((curAssertion->assertionKind == OAK_EQUAL) && (curAssertion->op1.kind == O1K_EXACT_TYPE) &&
- (curAssertion->op2.vn == vnStore->VNConservativeNormalValue(op2->gtVNPair)) && op1->TypeIs(TYP_I_IMPL))
+ op1->OperIs(GT_IND))
{
- VNFuncApp funcApp;
- if (vnStore->GetVNFunc(vnStore->VNConservativeNormalValue(op1->gtVNPair), &funcApp) &&
- (funcApp.m_func == VNF_InvariantLoad) && (curAssertion->op1.vn == funcApp.m_args[0]))
+ GenTree* indirAddr = op1->AsIndir()->Addr();
+
+ if (indirAddr->OperIs(GT_LCL_VAR) && (indirAddr->TypeGet() == TYP_REF))
{
- return assertionIndex;
+ // op1 is accessing vtable of a ref type local var
+ if ((curAssertion->op1.vn == vnStore->VNConservativeNormalValue(indirAddr->gtVNPair)) &&
+ (curAssertion->op2.vn == vnStore->VNConservativeNormalValue(op2->gtVNPair)))
+ {
+ return assertionIndex;
+ }
}
}
}
@@ -5210,8 +5186,7 @@ GenTree* Compiler::optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCal
{
return optAssertionProp_Update(call, call, stmt);
}
-
- if (!optLocalAssertionProp && call->IsHelperCall())
+ else if (!optLocalAssertionProp && call->IsHelperCall())
{
const CorInfoHelpFunc helper = eeGetHelperNum(call->gtCallMethHnd);
if ((helper == CORINFO_HELP_ISINSTANCEOFINTERFACE) || (helper == CORINFO_HELP_ISINSTANCEOFARRAY) ||
@@ -5220,21 +5195,22 @@ GenTree* Compiler::optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCal
(helper == CORINFO_HELP_CHKCASTCLASS) || (helper == CORINFO_HELP_CHKCASTANY) ||
(helper == CORINFO_HELP_CHKCASTCLASS_SPECIAL))
{
- GenTree* castToArg = call->gtArgs.GetArgByIndex(0)->GetNode();
- GenTree* objArg = call->gtArgs.GetArgByIndex(1)->GetNode();
+ GenTree* arg1 = call->gtArgs.GetArgByIndex(1)->GetNode();
+ if (arg1->gtOper != GT_LCL_VAR)
+ {
+ return nullptr;
+ }
- // We require objArg to be side effect free due to limitations in gtWrapWithSideEffects
- if ((objArg->gtFlags & GTF_ALL_EFFECT) == 0)
+ GenTree* arg2 = call->gtArgs.GetArgByIndex(0)->GetNode();
+
+ unsigned index = optAssertionIsSubtype(arg1, arg2, assertions);
+ if (index != NO_ASSERTION_INDEX)
{
- const unsigned index = optAssertionIsSubtype(objArg, castToArg, assertions);
- if (index != NO_ASSERTION_INDEX)
- {
- JITDUMP("\nDid VN based subtype prop for index #%02u in " FMT_BB ":\n", index, compCurBB->bbNum);
- DISPTREE(call);
+ JITDUMP("\nDid VN based subtype prop for index #%02u in " FMT_BB ":\n", index, compCurBB->bbNum);
+ DISPTREE(call);
- objArg = gtWrapWithSideEffects(objArg, call, GTF_SIDE_EFFECT, true);
- return optAssertionProp_Update(objArg, call, stmt);
- }
+ arg1 = gtWrapWithSideEffects(arg1, call, GTF_SIDE_EFFECT, true);
+ return optAssertionProp_Update(arg1, call, stmt);
}
// Leave a hint for fgLateCastExpansion that obj is never null.
@@ -5242,7 +5218,7 @@ GenTree* Compiler::optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCal
INDEBUG(bool vnBased = false);
// GTF_CALL_M_CAST_CAN_BE_EXPANDED check is to improve TP
if (((call->gtCallMoreFlags & GTF_CALL_M_CAST_CAN_BE_EXPANDED) != 0) &&
- optAssertionIsNonNull(objArg, assertions DEBUGARG(&vnBased) DEBUGARG(&nonNullIdx)))
+ optAssertionIsNonNull(arg1, assertions DEBUGARG(&vnBased) DEBUGARG(&nonNullIdx)))
{
call->gtCallMoreFlags |= GTF_CALL_M_CAST_OBJ_NONNULL;
return optAssertionProp_Update(call, call, stmt);
@@ -5499,6 +5475,7 @@ GenTree* Compiler::optAssertionProp(ASSERT_VALARG_TP assertions, GenTree* tree,
case GT_IND:
case GT_STOREIND:
case GT_NULLCHECK:
+ case GT_STORE_DYN_BLK:
return optAssertionProp_Ind(assertions, tree, stmt);
case GT_BOUNDS_CHECK:
@@ -6152,7 +6129,7 @@ ASSERT_TP* Compiler::optComputeAssertionGen()
AssertionIndex valueAssertionIndex;
AssertionIndex jumpDestAssertionIndex;
- if (info.AssertionHoldsOnFalseEdge())
+ if (info.IsNextEdgeAssertion())
{
valueAssertionIndex = info.GetAssertionIndex();
jumpDestAssertionIndex = optFindComplementary(info.GetAssertionIndex());
diff --git a/src/coreclr/jit/block.cpp b/src/coreclr/jit/block.cpp
index 1e7750997a103a..58b91b739fe424 100644
--- a/src/coreclr/jit/block.cpp
+++ b/src/coreclr/jit/block.cpp
@@ -68,68 +68,6 @@ unsigned SsaStressHashHelper()
}
#endif
-//------------------------------------------------------------------------
-// setLikelihood: set the likelihood of a flow edge
-//
-// Arguments:
-// likelihood -- value in range [0.0, 1.0] indicating how likely
-// the source block is to transfer control along this edge.
-//
-void FlowEdge::setLikelihood(weight_t likelihood)
-{
- assert(likelihood >= 0.0);
- assert(likelihood <= 1.0);
-
- if (m_likelihoodSet)
- {
- JITDUMP("setting likelihood of " FMT_BB " -> " FMT_BB " from " FMT_WT " to " FMT_WT "\n", m_sourceBlock->bbNum,
- m_destBlock->bbNum, m_likelihood, likelihood);
- }
- else
- {
- JITDUMP("setting likelihood of " FMT_BB " -> " FMT_BB " to " FMT_WT "\n", m_sourceBlock->bbNum,
- m_destBlock->bbNum, likelihood);
- }
-
- m_likelihoodSet = true;
- m_likelihood = likelihood;
-}
-
-//------------------------------------------------------------------------
-// addLikelihood: adjust the likelihood of a flow edge
-//
-// Arguments:
-// addedLikelihood -- value in range [-likelihood, 1.0 - likelihood]
-// to add to current likelihood.
-//
-void FlowEdge::addLikelihood(weight_t addedLikelihood)
-{
- assert(m_likelihoodSet);
-
- weight_t newLikelihood = m_likelihood + addedLikelihood;
-
- // Tolerate slight overflow or underflow
- //
- const weight_t eps = 0.0001;
-
- if ((newLikelihood < 0) && (newLikelihood > -eps))
- {
- newLikelihood = 0.0;
- }
- else if ((newLikelihood > 1) && (newLikelihood < 1 + eps))
- {
- newLikelihood = 1.0;
- }
-
- assert(newLikelihood >= 0.0);
- assert(newLikelihood <= 1.0);
-
- JITDUMP("updating likelihood of " FMT_BB " -> " FMT_BB " from " FMT_WT " to " FMT_WT "\n", m_sourceBlock->bbNum,
- m_destBlock->bbNum, m_likelihood, newLikelihood);
-
- m_likelihood = newLikelihood;
-}
-
//------------------------------------------------------------------------
// AllSuccessorEnumerator: Construct an instance of the enumerator.
//
@@ -688,33 +626,20 @@ void BasicBlock::dspSuccs(Compiler* compiler)
// things strictly.
void BasicBlock::dspKind() const
{
- auto dspBlockNum = [](const FlowEdge* e) -> const char* {
+ auto dspBlockNum = [](const BasicBlock* b) -> const char* {
static char buffers[3][64]; // static array of 3 to allow 3 concurrent calls in one printf()
static int nextBufferIndex = 0;
- auto& buffer = buffers[nextBufferIndex];
- nextBufferIndex = (nextBufferIndex + 1) % ArrLen(buffers);
- const size_t sizeOfBuffer = ArrLen(buffer);
- int written;
+ auto& buffer = buffers[nextBufferIndex];
+ nextBufferIndex = (nextBufferIndex + 1) % ArrLen(buffers);
- const BasicBlock* b = e->getDestinationBlock();
if (b == nullptr)
{
- written = _snprintf_s(buffer, sizeOfBuffer, sizeOfBuffer, "NULL");
+ _snprintf_s(buffer, ArrLen(buffer), ArrLen(buffer), "NULL");
}
else
{
- written = _snprintf_s(buffer, sizeOfBuffer, sizeOfBuffer, FMT_BB, b->bbNum);
- }
-
- const bool printEdgeLikelihoods = true; // TODO: parameterize this?
- if (printEdgeLikelihoods)
- {
- if (e->hasLikelihood())
- {
- written = _snprintf_s(buffer + written, sizeOfBuffer - written, sizeOfBuffer - written, "(" FMT_WT ")",
- e->getLikelihood());
- }
+ _snprintf_s(buffer, ArrLen(buffer), ArrLen(buffer), FMT_BB, b->bbNum);
}
return buffer;
@@ -738,7 +663,7 @@ void BasicBlock::dspKind() const
for (unsigned i = 0; i < jumpCnt; i++)
{
- printf("%c%s", (i == 0) ? ' ' : ',', dspBlockNum(jumpTab[i]));
+ printf("%c%s", (i == 0) ? ' ' : ',', dspBlockNum(jumpTab[i]->getDestinationBlock()));
}
}
@@ -751,11 +676,11 @@ void BasicBlock::dspKind() const
break;
case BBJ_EHFILTERRET:
- printf(" -> %s (fltret)", dspBlockNum(GetTargetEdge()));
+ printf(" -> %s (fltret)", dspBlockNum(bbTarget));
break;
case BBJ_EHCATCHRET:
- printf(" -> %s (cret)", dspBlockNum(GetTargetEdge()));
+ printf(" -> %s (cret)", dspBlockNum(bbTarget));
break;
case BBJ_THROW:
@@ -769,36 +694,36 @@ void BasicBlock::dspKind() const
case BBJ_ALWAYS:
if (HasFlag(BBF_KEEP_BBJ_ALWAYS))
{
- printf(" -> %s (ALWAYS)", dspBlockNum(GetTargetEdge()));
+ printf(" -> %s (ALWAYS)", dspBlockNum(bbTarget));
}
else
{
- printf(" -> %s (always)", dspBlockNum(GetTargetEdge()));
+ printf(" -> %s (always)", dspBlockNum(bbTarget));
}
break;
case BBJ_LEAVE:
- printf(" -> %s (leave)", dspBlockNum(GetTargetEdge()));
+ printf(" -> %s (leave)", dspBlockNum(bbTarget));
break;
case BBJ_CALLFINALLY:
- printf(" -> %s (callf)", dspBlockNum(GetTargetEdge()));
+ printf(" -> %s (callf)", dspBlockNum(bbTarget));
break;
case BBJ_CALLFINALLYRET:
- printf(" -> %s (callfr)", dspBlockNum(GetTargetEdge()));
+ printf(" -> %s (callfr)", dspBlockNum(bbTarget));
break;
case BBJ_COND:
- printf(" -> %s,%s (cond)", dspBlockNum(GetTrueEdge()), dspBlockNum(GetFalseEdge()));
+ printf(" -> %s,%s (cond)", dspBlockNum(bbTrueTarget), dspBlockNum(bbFalseTarget));
break;
case BBJ_SWITCH:
{
printf(" ->");
- const unsigned jumpCnt = bbSwtTargets->bbsCount;
- FlowEdge** const jumpTab = bbSwtTargets->bbsDstTab;
+ const unsigned jumpCnt = bbSwtTargets->bbsCount;
+ BasicBlock** const jumpTab = bbSwtTargets->bbsDstTab;
for (unsigned i = 0; i < jumpCnt; i++)
{
@@ -932,16 +857,11 @@ void BasicBlock::TransferTarget(BasicBlock* from)
SetEhf(from->GetEhfTargets());
from->bbEhfTargets = nullptr; // Make sure nobody uses the descriptor after this.
break;
-
- // TransferTarget may be called after setting the source block of `from`'s
- // successor edges to this block.
- // This means calling GetTarget/GetTrueTarget/GetFalseTarget would trigger asserts.
- // Avoid this by accessing the edges directly.
case BBJ_COND:
- SetCond(from->bbTrueEdge, from->bbFalseEdge);
+ SetCond(from->GetTrueTarget(), from->GetFalseTarget());
break;
case BBJ_ALWAYS:
- SetKindAndTargetEdge(BBJ_ALWAYS, from->bbTargetEdge);
+ SetKindAndTarget(from->GetKind(), from->GetTarget());
CopyFlags(from, BBF_NONE_QUIRK);
break;
case BBJ_CALLFINALLY:
@@ -949,10 +869,10 @@ void BasicBlock::TransferTarget(BasicBlock* from)
case BBJ_EHCATCHRET:
case BBJ_EHFILTERRET:
case BBJ_LEAVE:
- SetKindAndTargetEdge(from->GetKind(), from->bbTargetEdge);
+ SetKindAndTarget(from->GetKind(), from->GetTarget());
break;
default:
- SetKindAndTargetEdge(from->GetKind()); // Clear the target
+ SetKindAndTarget(from->GetKind()); // Clear the target
break;
}
assert(KindIs(from->GetKind()));
@@ -1065,7 +985,7 @@ BasicBlock* BasicBlock::GetUniquePred(Compiler* compiler) const
//
BasicBlock* BasicBlock::GetUniqueSucc() const
{
- return KindIs(BBJ_ALWAYS) ? GetTarget() : nullptr;
+ return KindIs(BBJ_ALWAYS) ? bbTarget : nullptr;
}
// Static vars.
@@ -1225,7 +1145,7 @@ unsigned BasicBlock::NumSucc() const
return 1;
case BBJ_COND:
- if (bbTrueEdge == bbFalseEdge)
+ if (bbTrueTarget == bbFalseTarget)
{
return 1;
}
@@ -1260,15 +1180,15 @@ unsigned BasicBlock::NumSucc() const
}
//------------------------------------------------------------------------
-// GetSucc: Returns the requested successor edge. See the declaration comment for details.
+// GetSucc: Returns the requested block successor. See the declaration comment for details.
//
// Arguments:
// i - index of successor to return. 0 <= i <= NumSucc().
//
// Return Value:
-// Requested successor edge
+// Requested successor block
//
-FlowEdge* BasicBlock::GetSuccEdge(unsigned i) const
+BasicBlock* BasicBlock::GetSucc(unsigned i) const
{
assert(i < NumSucc()); // Index bounds check.
switch (bbKind)
@@ -1279,22 +1199,22 @@ FlowEdge* BasicBlock::GetSuccEdge(unsigned i) const
case BBJ_EHCATCHRET:
case BBJ_EHFILTERRET:
case BBJ_LEAVE:
- return GetTargetEdge();
+ return bbTarget;
case BBJ_COND:
if (i == 0)
{
- return GetFalseEdge();
+ return bbFalseTarget;
}
else
{
assert(i == 1);
- assert(bbTrueEdge != bbFalseEdge);
- return GetTrueEdge();
+ assert(bbFalseTarget != bbTrueTarget);
+ return bbTrueTarget;
}
case BBJ_EHFINALLYRET:
- return bbEhfTargets->bbeSuccs[i];
+ return bbEhfTargets->bbeSuccs[i]->getDestinationBlock();
case BBJ_SWITCH:
return bbSwtTargets->bbsDstTab[i];
@@ -1304,20 +1224,6 @@ FlowEdge* BasicBlock::GetSuccEdge(unsigned i) const
}
}
-//------------------------------------------------------------------------
-// GetSucc: Returns the requested block successor. See the declaration comment for details.
-//
-// Arguments:
-// i - index of successor to return. 0 <= i <= NumSucc().
-//
-// Return Value:
-// Requested successor block
-//
-BasicBlock* BasicBlock::GetSucc(unsigned i) const
-{
- return GetSuccEdge(i)->getDestinationBlock();
-}
-
//------------------------------------------------------------------------
// NumSucc: Returns the count of block successors. See the declaration comment for details.
//
@@ -1364,7 +1270,7 @@ unsigned BasicBlock::NumSucc(Compiler* comp)
return 1;
case BBJ_COND:
- if (bbTrueEdge == bbFalseEdge)
+ if (bbTrueTarget == bbFalseTarget)
{
return 1;
}
@@ -1385,16 +1291,16 @@ unsigned BasicBlock::NumSucc(Compiler* comp)
}
//------------------------------------------------------------------------
-// GetSucc: Returns the requested successor edge. See the declaration comment for details.
+// GetSucc: Returns the requested block successor. See the declaration comment for details.
//
// Arguments:
// i - index of successor to return. 0 <= i <= NumSucc(comp).
// comp - Compiler instance
//
// Return Value:
-// Requested successor edge
+// Requested successor block
//
-FlowEdge* BasicBlock::GetSuccEdge(unsigned i, Compiler* comp)
+BasicBlock* BasicBlock::GetSucc(unsigned i, Compiler* comp)
{
assert(comp != nullptr);
@@ -1403,31 +1309,31 @@ FlowEdge* BasicBlock::GetSuccEdge(unsigned i, Compiler* comp)
{
case BBJ_EHFILTERRET:
// Handler is the (sole) normal successor of the filter.
- assert(comp->fgFirstBlockOfHandler(this) == GetTarget());
- return GetTargetEdge();
+ assert(comp->fgFirstBlockOfHandler(this) == bbTarget);
+ return bbTarget;
case BBJ_EHFINALLYRET:
assert(bbEhfTargets != nullptr);
assert(i < bbEhfTargets->bbeCount);
- return bbEhfTargets->bbeSuccs[i];
+ return bbEhfTargets->bbeSuccs[i]->getDestinationBlock();
case BBJ_CALLFINALLY:
case BBJ_CALLFINALLYRET:
case BBJ_ALWAYS:
case BBJ_EHCATCHRET:
case BBJ_LEAVE:
- return GetTargetEdge();
+ return bbTarget;
case BBJ_COND:
if (i == 0)
{
- return GetFalseEdge();
+ return bbFalseTarget;
}
else
{
assert(i == 1);
- assert(bbTrueEdge != bbFalseEdge);
- return GetTrueEdge();
+ assert(bbFalseTarget != bbTrueTarget);
+ return bbTrueTarget;
}
case BBJ_SWITCH:
@@ -1442,21 +1348,6 @@ FlowEdge* BasicBlock::GetSuccEdge(unsigned i, Compiler* comp)
}
}
-//------------------------------------------------------------------------
-// GetSucc: Returns the requested block successor. See the declaration comment for details.
-//
-// Arguments:
-// i - index of successor to return. 0 <= i <= NumSucc(comp).
-// comp - Compiler instance
-//
-// Return Value:
-// Requested successor block
-//
-BasicBlock* BasicBlock::GetSucc(unsigned i, Compiler* comp)
-{
- return GetSuccEdge(i, comp)->getDestinationBlock();
-}
-
void BasicBlock::InitVarSets(Compiler* comp)
{
VarSetOps::AssignNoCopy(comp, bbVarUse, VarSetOps::MakeEmpty(comp));
@@ -1694,10 +1585,15 @@ BasicBlock* BasicBlock::New(Compiler* compiler)
return block;
}
-BasicBlock* BasicBlock::New(Compiler* compiler, BBKinds kind)
+BasicBlock* BasicBlock::New(Compiler* compiler, BBKinds kind, BasicBlock* target /* = nullptr */)
{
BasicBlock* block = BasicBlock::New(compiler);
- block->bbKind = kind;
+
+ // In some cases, we don't know a block's jump target during initialization, so don't check the jump kind/target
+ // yet.
+ // The checks will be done any time the jump kind/target is read or written to after initialization.
+ block->bbKind = kind;
+ block->bbTarget = target;
if (block->KindIs(BBJ_THROW))
{
@@ -1878,7 +1774,7 @@ BBswtDesc::BBswtDesc(Compiler* comp, const BBswtDesc* other)
{
// Allocate and fill in a new dst tab
//
- bbsDstTab = new (comp, CMK_FlowEdge) FlowEdge*[bbsCount];
+ bbsDstTab = new (comp, CMK_BasicBlock) BasicBlock*[bbsCount];
for (unsigned i = 0; i < bbsCount; i++)
{
bbsDstTab[i] = other->bbsDstTab[i];
diff --git a/src/coreclr/jit/block.h b/src/coreclr/jit/block.h
index ae881d99f7361b..09fc1c71c35638 100644
--- a/src/coreclr/jit/block.h
+++ b/src/coreclr/jit/block.h
@@ -46,13 +46,9 @@ typedef BitVec_ValRet_T ASSERT_VALRET_TP;
// Use this format for loop indices
#define FMT_LP "L%02u"
-// Use this format for profile weights
+// And this format for profile weights
#define FMT_WT "%.7g"
-// Use this format for profile weights where we want to conserve horizontal space, at the expense of displaying
-// less precision.
-#define FMT_WT_NARROW "%.3g"
-
/*****************************************************************************
*
* Each basic block ends with a jump which is described as a value
@@ -262,9 +258,7 @@ class PredEdgeList
// PredBlockList: adapter class for forward iteration of the predecessor edge linked list yielding
// predecessor blocks, using range-based `for`, normally used via BasicBlock::PredBlocks(), e.g.:
// for (BasicBlock* const predBlock : block->PredBlocks()) ...
-// allowEdits controls whether the iterator should be resilient to changes to the predecessor list.
//
-template
class PredBlockList
{
FlowEdge* m_begin;
@@ -276,12 +270,13 @@ class PredBlockList
{
FlowEdge* m_pred;
- // When allowEdits=false, try to guard against the user of the iterator from modifying the predecessor list
- // being traversed: cache the edge we think should be next, then check it when we actually do the `++`
+#ifdef DEBUG
+ // Try to guard against the user of the iterator from making changes to the IR that would invalidate
+ // the iterator: cache the edge we think should be next, then check it when we actually do the `++`
// operation. This is a bit conservative, but attempts to protect against callers assuming too much about
// this iterator implementation.
- // When allowEdits=true, m_next is always used to update m_pred, so changes to m_pred don't break the iterator.
FlowEdge* m_next;
+#endif
public:
iterator(FlowEdge* pred);
@@ -312,67 +307,44 @@ class PredBlockList
}
};
-// BBArrayIterator: forward iterator for an array of BasicBlock*.
+// BBArrayIterator: forward iterator for an array of BasicBlock*, such as the BBswtDesc->bbsDstTab.
// It is an error (with assert) to yield a nullptr BasicBlock* in this array.
-// `m_edgeEntry` can be nullptr, but it only makes sense if both the begin and end of an iteration range are nullptr
+// `m_bbEntry` can be nullptr, but it only makes sense if both the begin and end of an iteration range are nullptr
// (meaning, no actual iteration will happen).
//
class BBArrayIterator
{
- FlowEdge* const* m_edgeEntry;
-
-public:
- BBArrayIterator(FlowEdge* const* edgeEntry) : m_edgeEntry(edgeEntry)
- {
- }
-
- BasicBlock* operator*() const;
-
- BBArrayIterator& operator++()
- {
- assert(m_edgeEntry != nullptr);
- ++m_edgeEntry;
- return *this;
- }
-
- bool operator!=(const BBArrayIterator& i) const
- {
- return m_edgeEntry != i.m_edgeEntry;
- }
-};
+ // Quirk: Some BasicBlock kinds refer to their successors with BasicBlock pointers,
+ // while others use FlowEdge pointers. Eventually, every type will use FlowEdge pointers.
+ // For now, support iterating with both types.
+ union {
+ BasicBlock* const* m_bbEntry;
+ FlowEdge* const* m_edgeEntry;
+ };
-// FlowEdgeArrayIterator: forward iterator for an array of FlowEdge*, such as the BBswtDesc->bbsDstTab.
-// It is an error (with assert) to yield a nullptr FlowEdge* in this array.
-// `m_edgeEntry` can be nullptr, but it only makes sense if both the begin and end of an iteration range are nullptr
-// (meaning, no actual iteration will happen).
-//
-class FlowEdgeArrayIterator
-{
- FlowEdge* const* m_edgeEntry;
+ bool iterateEdges;
public:
- FlowEdgeArrayIterator(FlowEdge* const* edgeEntry) : m_edgeEntry(edgeEntry)
+ BBArrayIterator(BasicBlock* const* bbEntry) : m_bbEntry(bbEntry), iterateEdges(false)
{
}
- FlowEdge* operator*() const
+ BBArrayIterator(FlowEdge* const* edgeEntry) : m_edgeEntry(edgeEntry), iterateEdges(true)
{
- assert(m_edgeEntry != nullptr);
- FlowEdge* const edge = *m_edgeEntry;
- assert(edge != nullptr);
- return edge;
}
- FlowEdgeArrayIterator& operator++()
+ BasicBlock* operator*() const;
+
+ BBArrayIterator& operator++()
{
- assert(m_edgeEntry != nullptr);
- ++m_edgeEntry;
+ assert(m_bbEntry != nullptr);
+ ++m_bbEntry;
return *this;
}
- bool operator!=(const FlowEdgeArrayIterator& i) const
+ bool operator!=(const BBArrayIterator& i) const
{
- return m_edgeEntry != i.m_edgeEntry;
+ return m_bbEntry != i.m_bbEntry;
}
};
@@ -534,180 +506,6 @@ enum class BasicBlockVisit
// clang-format on
-//-------------------------------------------------------------------------
-// FlowEdge -- control flow edge
-//
-// In compiler terminology the control flow between two BasicBlocks
-// is typically referred to as an "edge". Most well known are the
-// backward branches for loops, which are often called "back-edges".
-//
-// "struct FlowEdge" is the type that represents our control flow edges.
-// This type is a linked list of zero or more "edges".
-// (The list of zero edges is represented by NULL.)
-// Every BasicBlock has a field called bbPreds of this type. This field
-// represents the list of "edges" that flow into this BasicBlock.
-// The FlowEdge type only stores the BasicBlock* of the source for the
-// control flow edge. The destination block for the control flow edge
-// is implied to be the block which contained the bbPreds field.
-//
-// For a switch branch target there may be multiple "edges" that have
-// the same source block (and destination block). We need to count the
-// number of these edges so that during optimization we will know when
-// we have zero of them. Rather than have extra FlowEdge entries we
-// track this via the DupCount property.
-//
-// When we have Profile weight for the BasicBlocks we can usually compute
-// the number of times each edge was executed by examining the adjacent
-// BasicBlock weights. As we are doing for BasicBlocks, we call the number
-// of times that a control flow edge was executed the "edge weight".
-// In order to compute the edge weights we need to use a bounded range
-// for every edge weight. These two fields, 'flEdgeWeightMin' and 'flEdgeWeightMax'
-// are used to hold a bounded range. Most often these will converge such
-// that both values are the same and that value is the exact edge weight.
-// Sometimes we are left with a rage of possible values between [Min..Max]
-// which represents an inexact edge weight.
-//
-// The bbPreds list is initially created by Compiler::fgLinkBasicBlocks()
-// and is incrementally kept up to date.
-//
-// The edge weight are computed by Compiler::fgComputeEdgeWeights()
-// the edge weights are used to straighten conditional branches
-// by Compiler::fgReorderBlocks()
-//
-struct FlowEdge
-{
-private:
- // The next predecessor edge in the list, nullptr for end of list.
- FlowEdge* m_nextPredEdge;
-
- // The source of the control flow
- BasicBlock* m_sourceBlock;
-
- // The destination of the control flow
- BasicBlock* m_destBlock;
-
- // Edge weights
- weight_t m_edgeWeightMin;
- weight_t m_edgeWeightMax;
-
- // Likelihood that m_sourceBlock transfers control along this edge.
- // Values in range [0..1]
- weight_t m_likelihood;
-
- // The count of duplicate "edges" (used for switch stmts or degenerate branches)
- unsigned m_dupCount;
-
- // True if likelihood has been set
- bool m_likelihoodSet;
-
-public:
- FlowEdge(BasicBlock* sourceBlock, BasicBlock* destBlock, FlowEdge* rest)
- : m_nextPredEdge(rest)
- , m_sourceBlock(sourceBlock)
- , m_destBlock(destBlock)
- , m_edgeWeightMin(0)
- , m_edgeWeightMax(0)
- , m_likelihood(0)
- , m_dupCount(0)
- , m_likelihoodSet(false)
- {
- }
-
- FlowEdge* getNextPredEdge() const
- {
- return m_nextPredEdge;
- }
-
- FlowEdge** getNextPredEdgeRef()
- {
- return &m_nextPredEdge;
- }
-
- void setNextPredEdge(FlowEdge* newEdge)
- {
- m_nextPredEdge = newEdge;
- }
-
- BasicBlock* getSourceBlock() const
- {
- assert(m_sourceBlock != nullptr);
- return m_sourceBlock;
- }
-
- void setSourceBlock(BasicBlock* newBlock)
- {
- assert(newBlock != nullptr);
- m_sourceBlock = newBlock;
- }
-
- BasicBlock* getDestinationBlock() const
- {
- assert(m_destBlock != nullptr);
- return m_destBlock;
- }
-
- void setDestinationBlock(BasicBlock* newBlock)
- {
- assert(newBlock != nullptr);
- m_destBlock = newBlock;
- }
-
- weight_t edgeWeightMin() const
- {
- return m_edgeWeightMin;
- }
-
- weight_t edgeWeightMax() const
- {
- return m_edgeWeightMax;
- }
-
- // These two methods are used to set new values for edge weights.
- // They return false if the newWeight is not between the current [min..max]
- // when slop is non-zero we allow for the case where our weights might be off by 'slop'
- //
- bool setEdgeWeightMinChecked(weight_t newWeight, BasicBlock* bDst, weight_t slop, bool* wbUsedSlop);
- bool setEdgeWeightMaxChecked(weight_t newWeight, BasicBlock* bDst, weight_t slop, bool* wbUsedSlop);
- void setEdgeWeights(weight_t newMinWeight, weight_t newMaxWeight, BasicBlock* bDst);
-
- weight_t getLikelihood() const
- {
- return m_likelihood;
- }
-
- void setLikelihood(weight_t likelihood);
- void addLikelihood(weight_t addedLikelihod);
-
- void clearLikelihood()
- {
- m_likelihood = 0.0;
- m_likelihoodSet = false;
- }
-
- bool hasLikelihood() const
- {
- return m_likelihoodSet;
- }
-
- weight_t getLikelyWeight() const;
-
- unsigned getDupCount() const
- {
- return m_dupCount;
- }
-
- void incrementDupCount()
- {
- m_dupCount++;
- }
-
- void decrementDupCount()
- {
- assert(m_dupCount >= 1);
- m_dupCount--;
- }
-};
-
//------------------------------------------------------------------------
// BasicBlock: describes a basic block in the flowgraph.
//
@@ -727,19 +525,19 @@ struct BasicBlock : private LIR::Range
/* The following union describes the jump target(s) of this block */
union {
- unsigned bbTargetOffs; // PC offset (temporary only)
- FlowEdge* bbTargetEdge; // successor edge for block kinds with only one successor (BBJ_ALWAYS, etc)
- FlowEdge* bbTrueEdge; // BBJ_COND successor edge when its condition is true (alias for bbTargetEdge)
- BBswtDesc* bbSwtTargets; // switch descriptor
- BBehfDesc* bbEhfTargets; // BBJ_EHFINALLYRET descriptor
+ unsigned bbTargetOffs; // PC offset (temporary only)
+ BasicBlock* bbTarget; // basic block
+ BasicBlock* bbTrueTarget; // BBJ_COND jump target when its condition is true (alias for bbTarget)
+ BBswtDesc* bbSwtTargets; // switch descriptor
+ BBehfDesc* bbEhfTargets; // BBJ_EHFINALLYRET descriptor
};
- // Successor edge of a BBJ_COND block if bbTrueEdge is not taken
- FlowEdge* bbFalseEdge;
+ // Points to the successor of a BBJ_COND block if bbTrueTarget is not taken
+ BasicBlock* bbFalseTarget;
public:
static BasicBlock* New(Compiler* compiler);
- static BasicBlock* New(Compiler* compiler, BBKinds kind);
+ static BasicBlock* New(Compiler* compiler, BBKinds kind, BasicBlock* target = nullptr);
static BasicBlock* New(Compiler* compiler, BBehfDesc* ehfTargets);
static BasicBlock* New(Compiler* compiler, BBswtDesc* swtTargets);
static BasicBlock* New(Compiler* compiler, BBKinds kind, unsigned targetOffs);
@@ -825,145 +623,100 @@ struct BasicBlock : private LIR::Range
return bbTargetOffs;
}
- bool HasTarget() const
+ void SetKindAndTarget(BBKinds kind, unsigned targetOffs)
{
- // These block types should always have bbTargetEdge set
- return KindIs(BBJ_ALWAYS, BBJ_CALLFINALLY, BBJ_CALLFINALLYRET, BBJ_EHCATCHRET, BBJ_EHFILTERRET, BBJ_LEAVE);
+ bbKind = kind;
+ bbTargetOffs = targetOffs;
+ assert(KindIs(BBJ_ALWAYS, BBJ_COND, BBJ_LEAVE));
}
- BasicBlock* GetTarget() const
+ bool HasTarget() const
{
- return GetTargetEdge()->getDestinationBlock();
+ // These block types should always have bbTarget set
+ return KindIs(BBJ_ALWAYS, BBJ_CALLFINALLY, BBJ_CALLFINALLYRET, BBJ_EHCATCHRET, BBJ_EHFILTERRET, BBJ_LEAVE);
}
- FlowEdge* GetTargetEdge() const
+ BasicBlock* GetTarget() const
{
- // Only block kinds that use `bbTargetEdge` can access it, and it must be non-null.
+ // Only block kinds that use `bbTarget` can access it, and it must be non-null.
assert(HasInitializedTarget());
- assert(bbTargetEdge->getSourceBlock() == this);
- assert(bbTargetEdge->getDestinationBlock() != nullptr);
- return bbTargetEdge;
+ return bbTarget;
}
- void SetTargetEdge(FlowEdge* targetEdge)
+ void SetTarget(BasicBlock* target)
{
// SetKindAndTarget() nulls target for non-jump kinds,
- // so don't use SetTargetEdge() to null bbTargetEdge without updating bbKind.
- bbTargetEdge = targetEdge;
+ // so don't use SetTarget() to null bbTarget without updating bbKind.
+ bbTarget = target;
assert(HasInitializedTarget());
- assert(bbTargetEdge->getSourceBlock() == this);
- assert(bbTargetEdge->getDestinationBlock() != nullptr);
-
- // This is the only successor edge for this block, so likelihood should be 1.0
- bbTargetEdge->setLikelihood(1.0);
}
BasicBlock* GetTrueTarget() const
- {
- return GetTrueEdge()->getDestinationBlock();
- }
-
- FlowEdge* GetTrueEdge() const
{
assert(KindIs(BBJ_COND));
- assert(bbTrueEdge != nullptr);
- assert(bbTrueEdge->getSourceBlock() == this);
- assert(bbTrueEdge->getDestinationBlock() != nullptr);
- return bbTrueEdge;
+ assert(bbTrueTarget != nullptr);
+ return bbTrueTarget;
}
- void SetTrueEdge(FlowEdge* trueEdge)
+ void SetTrueTarget(BasicBlock* target)
{
assert(KindIs(BBJ_COND));
- bbTrueEdge = trueEdge;
- assert(bbTrueEdge != nullptr);
- assert(bbTrueEdge->getSourceBlock() == this);
- assert(bbTrueEdge->getDestinationBlock() != nullptr);
+ assert(target != nullptr);
+ bbTrueTarget = target;
}
bool TrueTargetIs(const BasicBlock* target) const
{
- return (GetTrueTarget() == target);
- }
-
- bool TrueEdgeIs(const FlowEdge* targetEdge) const
- {
- return (GetTrueEdge() == targetEdge);
+ assert(KindIs(BBJ_COND));
+ assert(bbTrueTarget != nullptr);
+ return (bbTrueTarget == target);
}
BasicBlock* GetFalseTarget() const
- {
- return GetFalseEdge()->getDestinationBlock();
- }
-
- FlowEdge* GetFalseEdge() const
{
assert(KindIs(BBJ_COND));
- assert(bbFalseEdge != nullptr);
- assert(bbFalseEdge->getSourceBlock() == this);
- assert(bbFalseEdge->getDestinationBlock() != nullptr);
- return bbFalseEdge;
+ assert(bbFalseTarget != nullptr);
+ return bbFalseTarget;
}
- void SetFalseEdge(FlowEdge* falseEdge)
+ void SetFalseTarget(BasicBlock* target)
{
assert(KindIs(BBJ_COND));
- bbFalseEdge = falseEdge;
- assert(bbFalseEdge != nullptr);
- assert(bbFalseEdge->getSourceBlock() == this);
- assert(bbFalseEdge->getDestinationBlock() != nullptr);
+ assert(target != nullptr);
+ bbFalseTarget = target;
}
bool FalseTargetIs(const BasicBlock* target) const
{
- return (GetFalseTarget() == target);
- }
-
- bool FalseEdgeIs(const FlowEdge* targetEdge) const
- {
- return (GetFalseEdge() == targetEdge);
- }
-
- void SetCond(FlowEdge* trueEdge, FlowEdge* falseEdge)
- {
- bbKind = BBJ_COND;
- SetTrueEdge(trueEdge);
- SetFalseEdge(falseEdge);
+ assert(KindIs(BBJ_COND));
+ assert(bbFalseTarget != nullptr);
+ return (bbFalseTarget == target);
}
- // In most cases, a block's true and false targets are known by the time SetCond is called.
- // To simplify the few cases where the false target isn't available until later,
- // overload SetCond to initialize only the true target.
- // This simplifies, for example, lowering switch blocks into jump sequences.
- void SetCond(FlowEdge* trueEdge)
+ void SetCond(BasicBlock* trueTarget, BasicBlock* falseTarget)
{
- bbKind = BBJ_COND;
- SetTrueEdge(trueEdge);
+ assert(trueTarget != nullptr);
+ bbKind = BBJ_COND;
+ bbTrueTarget = trueTarget;
+ bbFalseTarget = falseTarget;
}
- // Set both the block kind and target edge.
- void SetKindAndTargetEdge(BBKinds kind, FlowEdge* targetEdge)
+ // Set both the block kind and target. This can clear `bbTarget` when setting
+ // block kinds that don't use `bbTarget`.
+ void SetKindAndTarget(BBKinds kind, BasicBlock* target = nullptr)
{
- bbKind = kind;
- bbTargetEdge = targetEdge;
- assert(HasInitializedTarget());
-
- // This is the only successor edge for this block, so likelihood should be 1.0
- bbTargetEdge->setLikelihood(1.0);
- }
+ bbKind = kind;
+ bbTarget = target;
- // Set the block kind, and clear bbTargetEdge.
- void SetKindAndTargetEdge(BBKinds kind)
- {
- bbKind = kind;
- bbTargetEdge = nullptr;
- assert(!HasTarget());
+ // If bbKind indicates this block has a jump, bbTarget cannot be null.
+ // You shouldn't use this to set a BBJ_COND, BBJ_SWITCH, or BBJ_EHFINALLYRET.
+ assert(HasTarget() ? HasInitializedTarget() : (bbTarget == nullptr));
}
bool HasInitializedTarget() const
{
assert(HasTarget());
- return (bbTargetEdge != nullptr);
+ return (bbTarget != nullptr);
}
bool TargetIs(const BasicBlock* target) const
@@ -1009,13 +762,19 @@ struct BasicBlock : private LIR::Range
bbEhfTargets = ehfTarget;
}
- // BBJ_CALLFINALLYRET uses the `bbTargetEdge` field. However, also treat it specially:
+ // BBJ_CALLFINALLYRET uses the `bbTarget` field. However, also treat it specially:
// for callers that know they want a continuation, use this function instead of the
// general `GetTarget()` to allow asserting on the block kind.
BasicBlock* GetFinallyContinuation() const
{
assert(KindIs(BBJ_CALLFINALLYRET));
- return GetTarget();
+ return bbTarget;
+ }
+
+ void SetFinallyContinuation(BasicBlock* finallyContinuation)
+ {
+ assert(KindIs(BBJ_CALLFINALLYRET));
+ bbTarget = finallyContinuation;
}
#ifdef DEBUG
@@ -1024,42 +783,21 @@ struct BasicBlock : private LIR::Range
BasicBlock* GetTargetRaw() const
{
assert(HasTarget());
- return (bbTargetEdge == nullptr) ? nullptr : bbTargetEdge->getDestinationBlock();
+ return bbTarget;
}
// Return the BBJ_COND true target; it might be null. Only used during dumping.
BasicBlock* GetTrueTargetRaw() const
{
assert(KindIs(BBJ_COND));
- return (bbTrueEdge == nullptr) ? nullptr : bbTrueEdge->getDestinationBlock();
+ return bbTrueTarget;
}
// Return the BBJ_COND false target; it might be null. Only used during dumping.
BasicBlock* GetFalseTargetRaw() const
{
assert(KindIs(BBJ_COND));
- return (bbFalseEdge == nullptr) ? nullptr : bbFalseEdge->getDestinationBlock();
- }
-
- // Return the target edge; it might be null. Only used during dumping.
- FlowEdge* GetTargetEdgeRaw() const
- {
- assert(HasTarget());
- return bbTargetEdge;
- }
-
- // Return the BBJ_COND true target edge; it might be null. Only used during dumping.
- FlowEdge* GetTrueEdgeRaw() const
- {
- assert(KindIs(BBJ_COND));
- return bbTrueEdge;
- }
-
- // Return the BBJ_COND false target edge; it might be null. Only used during dumping.
- FlowEdge* GetFalseEdgeRaw() const
- {
- assert(KindIs(BBJ_COND));
- return bbFalseEdge;
+ return bbFalseTarget;
}
#endif // DEBUG
@@ -1349,11 +1087,7 @@ struct BasicBlock : private LIR::Range
unsigned NumSucc() const;
unsigned NumSucc(Compiler* comp);
- // GetSuccEdge: Returns the "i"th successor edge. Requires (0 <= i < NumSucc()).
- FlowEdge* GetSuccEdge(unsigned i) const;
- FlowEdge* GetSuccEdge(unsigned i, Compiler* comp);
-
- // GetSucc: Returns the "i"th successor block. Requires (0 <= i < NumSucc()).
+ // GetSucc: Returns the "i"th successor. Requires (0 <= i < NumSucc()).
BasicBlock* GetSucc(unsigned i) const;
BasicBlock* GetSucc(unsigned i, Compiler* comp);
@@ -1556,18 +1290,9 @@ struct BasicBlock : private LIR::Range
// PredBlocks: convenience method for enabling range-based `for` iteration over predecessor blocks, e.g.:
// for (BasicBlock* const predBlock : block->PredBlocks()) ...
//
- PredBlockList PredBlocks() const
- {
- return PredBlockList(bbPreds);
- }
-
- // PredBlocksEditing: convenience method for enabling range-based `for` iteration over predecessor blocks, e.g.:
- // for (BasicBlock* const predBlock : block->PredBlocksEditing()) ...
- // This iterator tolerates modifications to bbPreds.
- //
- PredBlockList PredBlocksEditing() const
+ PredBlockList PredBlocks() const
{
- return PredBlockList(bbPreds);
+ return PredBlockList(bbPreds);
}
// Pred list maintenance
@@ -1841,64 +1566,37 @@ struct BasicBlock : private LIR::Range
bool HasPotentialEHSuccs(Compiler* comp);
- // Base class for Successor block/edge iterators.
+ // BBSuccList: adapter class for forward iteration of block successors, using range-based `for`,
+ // normally used via BasicBlock::Succs(), e.g.:
+ // for (BasicBlock* const target : block->Succs()) ...
//
- class SuccList
+ class BBSuccList
{
- protected:
// For one or two successors, pre-compute and stash the successors inline, in m_succs[], so we don't
// need to call a function or execute another `switch` to get them. Also, pre-compute the begin and end
// points of the iteration, for use by BBArrayIterator. `m_begin` and `m_end` will either point at
// `m_succs` or at the switch table successor array.
- FlowEdge* m_succs[2];
- FlowEdge* const* m_begin;
- FlowEdge* const* m_end;
-
- SuccList(const BasicBlock* block);
- };
+ BasicBlock* m_succs[2];
+
+ // Quirk: Some BasicBlock kinds refer to their successors with BasicBlock pointers,
+ // while others use FlowEdge pointers. Eventually, every type will use FlowEdge pointers.
+ // For now, support iterating with both types.
+ union {
+ BasicBlock* const* m_begin;
+ FlowEdge* const* m_beginEdge;
+ };
- // BBSuccList: adapter class for forward iteration of block successors, using range-based `for`,
- // normally used via BasicBlock::Succs(), e.g.:
- // for (BasicBlock* const target : block->Succs()) ...
- //
- class BBSuccList : private SuccList
- {
- public:
- BBSuccList(const BasicBlock* block) : SuccList(block)
- {
- }
+ union {
+ BasicBlock* const* m_end;
+ FlowEdge* const* m_endEdge;
+ };
- BBArrayIterator begin() const
- {
- return BBArrayIterator(m_begin);
- }
+ bool iterateEdges;
- BBArrayIterator end() const
- {
- return BBArrayIterator(m_end);
- }
- };
-
- // BBSuccEdgeList: adapter class for forward iteration of block successors edges, using range-based `for`,
- // normally used via BasicBlock::SuccEdges(), e.g.:
- // for (FlowEdge* const succEdge : block->SuccEdges()) ...
- //
- class BBSuccEdgeList : private SuccList
- {
public:
- BBSuccEdgeList(const BasicBlock* block) : SuccList(block)
- {
- }
-
- FlowEdgeArrayIterator begin() const
- {
- return FlowEdgeArrayIterator(m_begin);
- }
-
- FlowEdgeArrayIterator end() const
- {
- return FlowEdgeArrayIterator(m_end);
- }
+ BBSuccList(const BasicBlock* block);
+ BBArrayIterator begin() const;
+ BBArrayIterator end() const;
};
// BBCompilerSuccList: adapter class for forward iteration of block successors, using range-based `for`,
@@ -1912,7 +1610,7 @@ struct BasicBlock : private LIR::Range
Compiler* m_comp;
BasicBlock* m_block;
- // iterator: forward iterator for an array of BasicBlock*
+ // iterator: forward iterator for an array of BasicBlock*, such as the BBswtDesc->bbsDstTab.
//
class iterator
{
@@ -1962,67 +1660,6 @@ struct BasicBlock : private LIR::Range
}
};
- // BBCompilerSuccEdgeList: adapter class for forward iteration of block successors edges, using range-based `for`,
- // normally used via BasicBlock::SuccEdges(), e.g.:
- // for (FlowEdge* const succEdge : block->SuccEdges(compiler)) ...
- //
- // This version uses NumSucc(Compiler*)/GetSucc(Compiler*). See the documentation there for the explanation
- // of the implications of this versus the version that does not take `Compiler*`.
- class BBCompilerSuccEdgeList
- {
- Compiler* m_comp;
- BasicBlock* m_block;
-
- // iterator: forward iterator for an array of BasicBlock*
- //
- class iterator
- {
- Compiler* m_comp;
- BasicBlock* m_block;
- unsigned m_succNum;
-
- public:
- iterator(Compiler* comp, BasicBlock* block, unsigned succNum)
- : m_comp(comp), m_block(block), m_succNum(succNum)
- {
- }
-
- FlowEdge* operator*() const
- {
- assert(m_block != nullptr);
- FlowEdge* succEdge = m_block->GetSuccEdge(m_succNum, m_comp);
- assert(succEdge != nullptr);
- return succEdge;
- }
-
- iterator& operator++()
- {
- ++m_succNum;
- return *this;
- }
-
- bool operator!=(const iterator& i) const
- {
- return m_succNum != i.m_succNum;
- }
- };
-
- public:
- BBCompilerSuccEdgeList(Compiler* comp, BasicBlock* block) : m_comp(comp), m_block(block)
- {
- }
-
- iterator begin() const
- {
- return iterator(m_comp, m_block, 0);
- }
-
- iterator end() const
- {
- return iterator(m_comp, m_block, m_block->NumSucc(m_comp));
- }
- };
-
// Succs: convenience methods for enabling range-based `for` iteration over a block's successors, e.g.:
// for (BasicBlock* const succ : block->Succs()) ...
//
@@ -2039,16 +1676,6 @@ struct BasicBlock : private LIR::Range
return BBCompilerSuccList(comp, this);
}
- BBSuccEdgeList SuccEdges()
- {
- return BBSuccEdgeList(this);
- }
-
- BBCompilerSuccEdgeList SuccEdges(Compiler* comp)
- {
- return BBCompilerSuccEdgeList(comp, this);
- }
-
// Clone block state and statements from `from` block to `to` block (which must be new/empty)
static void CloneBlockState(Compiler* compiler, BasicBlock* to, const BasicBlock* from);
@@ -2213,8 +1840,8 @@ class BasicBlockRangeList
//
struct BBswtDesc
{
- FlowEdge** bbsDstTab; // case label table address
- unsigned bbsCount; // count of cases (includes 'default' if bbsHasDefault)
+ BasicBlock** bbsDstTab; // case label table address
+ unsigned bbsCount; // count of cases (includes 'default' if bbsHasDefault)
// Case number and likelihood of most likely case
// (only known with PGO, only valid if bbsHasDominantCase is true)
@@ -2240,7 +1867,7 @@ struct BBswtDesc
bbsCount--;
}
- FlowEdge* getDefault()
+ BasicBlock* getDefault()
{
assert(bbsHasDefault);
assert(bbsCount > 0);
@@ -2300,11 +1927,12 @@ inline BBArrayIterator BBEhfSuccList::end() const
return BBArrayIterator(m_bbeDesc->bbeSuccs + m_bbeDesc->bbeCount);
}
-// SuccList out-of-class-declaration implementations
+// BBSuccList out-of-class-declaration implementations
//
-inline BasicBlock::SuccList::SuccList(const BasicBlock* block)
+inline BasicBlock::BBSuccList::BBSuccList(const BasicBlock* block)
{
assert(block != nullptr);
+ iterateEdges = false;
switch (block->bbKind)
{
@@ -2322,24 +1950,24 @@ inline BasicBlock::SuccList::SuccList(const BasicBlock* block)
case BBJ_EHCATCHRET:
case BBJ_EHFILTERRET:
case BBJ_LEAVE:
- m_succs[0] = block->GetTargetEdge();
+ m_succs[0] = block->bbTarget;
m_begin = &m_succs[0];
m_end = &m_succs[1];
break;
case BBJ_COND:
- m_succs[0] = block->GetFalseEdge();
+ m_succs[0] = block->bbFalseTarget;
m_begin = &m_succs[0];
// If both fall-through and branch successors are identical, then only include
// them once in the iteration (this is the same behavior as NumSucc()/GetSucc()).
- if (block->TrueEdgeIs(block->GetFalseEdge()))
+ if (block->TrueTargetIs(block->GetFalseTarget()))
{
m_end = &m_succs[1];
}
else
{
- m_succs[1] = block->GetTrueEdge();
+ m_succs[1] = block->bbTrueTarget;
m_end = &m_succs[2];
}
break;
@@ -2350,14 +1978,16 @@ inline BasicBlock::SuccList::SuccList(const BasicBlock* block)
// been computed.
if (block->GetEhfTargets() == nullptr)
{
- m_begin = nullptr;
- m_end = nullptr;
+ m_beginEdge = nullptr;
+ m_endEdge = nullptr;
}
else
{
- m_begin = block->GetEhfTargets()->bbeSuccs;
- m_end = block->GetEhfTargets()->bbeSuccs + block->GetEhfTargets()->bbeCount;
+ m_beginEdge = block->GetEhfTargets()->bbeSuccs;
+ m_endEdge = block->GetEhfTargets()->bbeSuccs + block->GetEhfTargets()->bbeCount;
}
+
+ iterateEdges = true;
break;
case BBJ_SWITCH:
@@ -2375,6 +2005,16 @@ inline BasicBlock::SuccList::SuccList(const BasicBlock* block)
assert(m_end >= m_begin);
}
+inline BBArrayIterator BasicBlock::BBSuccList::begin() const
+{
+ return (iterateEdges ? BBArrayIterator(m_beginEdge) : BBArrayIterator(m_begin));
+}
+
+inline BBArrayIterator BasicBlock::BBSuccList::end() const
+{
+ return (iterateEdges ? BBArrayIterator(m_endEdge) : BBArrayIterator(m_end));
+}
+
// We have a simpler struct, BasicBlockList, which is simply a singly-linked
// list of blocks.
@@ -2392,23 +2032,206 @@ struct BasicBlockList
}
};
-// FlowEdge implementations (that are required to be defined after the declaration of BasicBlock)
-
-inline weight_t FlowEdge::getLikelyWeight() const
+//-------------------------------------------------------------------------
+// FlowEdge -- control flow edge
+//
+// In compiler terminology the control flow between two BasicBlocks
+// is typically referred to as an "edge". Most well known are the
+// backward branches for loops, which are often called "back-edges".
+//
+// "struct FlowEdge" is the type that represents our control flow edges.
+// This type is a linked list of zero or more "edges".
+// (The list of zero edges is represented by NULL.)
+// Every BasicBlock has a field called bbPreds of this type. This field
+// represents the list of "edges" that flow into this BasicBlock.
+// The FlowEdge type only stores the BasicBlock* of the source for the
+// control flow edge. The destination block for the control flow edge
+// is implied to be the block which contained the bbPreds field.
+//
+// For a switch branch target there may be multiple "edges" that have
+// the same source block (and destination block). We need to count the
+// number of these edges so that during optimization we will know when
+// we have zero of them. Rather than have extra FlowEdge entries we
+// track this via the DupCount property.
+//
+// When we have Profile weight for the BasicBlocks we can usually compute
+// the number of times each edge was executed by examining the adjacent
+// BasicBlock weights. As we are doing for BasicBlocks, we call the number
+// of times that a control flow edge was executed the "edge weight".
+// In order to compute the edge weights we need to use a bounded range
+// for every edge weight. These two fields, 'flEdgeWeightMin' and 'flEdgeWeightMax'
+// are used to hold a bounded range. Most often these will converge such
+// that both values are the same and that value is the exact edge weight.
+// Sometimes we are left with a rage of possible values between [Min..Max]
+// which represents an inexact edge weight.
+//
+// The bbPreds list is initially created by Compiler::fgLinkBasicBlocks()
+// and is incrementally kept up to date.
+//
+// The edge weight are computed by Compiler::fgComputeEdgeWeights()
+// the edge weights are used to straighten conditional branches
+// by Compiler::fgReorderBlocks()
+//
+struct FlowEdge
{
- assert(m_likelihoodSet);
- return m_likelihood * m_sourceBlock->bbWeight;
-}
+private:
+ // The next predecessor edge in the list, nullptr for end of list.
+ FlowEdge* m_nextPredEdge;
+
+ // The source of the control flow
+ BasicBlock* m_sourceBlock;
+
+ // The destination of the control flow
+ BasicBlock* m_destBlock;
+
+ // Edge weights
+ weight_t m_edgeWeightMin;
+ weight_t m_edgeWeightMax;
+
+ // Likelihood that m_sourceBlock transfers control along this edge.
+ // Values in range [0..1]
+ weight_t m_likelihood;
+
+ // The count of duplicate "edges" (used for switch stmts or degenerate branches)
+ unsigned m_dupCount;
+
+ // True if likelihood has been set
+ bool m_likelihoodSet;
+
+public:
+ FlowEdge(BasicBlock* sourceBlock, BasicBlock* destBlock, FlowEdge* rest)
+ : m_nextPredEdge(rest)
+ , m_sourceBlock(sourceBlock)
+ , m_destBlock(destBlock)
+ , m_edgeWeightMin(0)
+ , m_edgeWeightMax(0)
+ , m_likelihood(0)
+ , m_dupCount(0)
+ , m_likelihoodSet(false)
+ {
+ }
+
+ FlowEdge* getNextPredEdge() const
+ {
+ return m_nextPredEdge;
+ }
+
+ FlowEdge** getNextPredEdgeRef()
+ {
+ return &m_nextPredEdge;
+ }
+
+ void setNextPredEdge(FlowEdge* newEdge)
+ {
+ m_nextPredEdge = newEdge;
+ }
+
+ BasicBlock* getSourceBlock() const
+ {
+ assert(m_sourceBlock != nullptr);
+ return m_sourceBlock;
+ }
+
+ void setSourceBlock(BasicBlock* newBlock)
+ {
+ assert(newBlock != nullptr);
+ m_sourceBlock = newBlock;
+ }
+
+ BasicBlock* getDestinationBlock() const
+ {
+ assert(m_destBlock != nullptr);
+ return m_destBlock;
+ }
+
+ void setDestinationBlock(BasicBlock* newBlock)
+ {
+ assert(newBlock != nullptr);
+ m_destBlock = newBlock;
+ }
+
+ weight_t edgeWeightMin() const
+ {
+ return m_edgeWeightMin;
+ }
+
+ weight_t edgeWeightMax() const
+ {
+ return m_edgeWeightMax;
+ }
+
+ // These two methods are used to set new values for edge weights.
+ // They return false if the newWeight is not between the current [min..max]
+ // when slop is non-zero we allow for the case where our weights might be off by 'slop'
+ //
+ bool setEdgeWeightMinChecked(weight_t newWeight, BasicBlock* bDst, weight_t slop, bool* wbUsedSlop);
+ bool setEdgeWeightMaxChecked(weight_t newWeight, BasicBlock* bDst, weight_t slop, bool* wbUsedSlop);
+ void setEdgeWeights(weight_t newMinWeight, weight_t newMaxWeight, BasicBlock* bDst);
+
+ weight_t getLikelihood() const
+ {
+ return m_likelihood;
+ }
+
+ void setLikelihood(weight_t likelihood)
+ {
+ assert(likelihood >= 0.0);
+ assert(likelihood <= 1.0);
+ m_likelihoodSet = true;
+ m_likelihood = likelihood;
+ }
+
+ void clearLikelihood()
+ {
+ m_likelihood = 0.0;
+ m_likelihoodSet = false;
+ }
+
+ bool hasLikelihood() const
+ {
+ return m_likelihoodSet;
+ }
+
+ weight_t getLikelyWeight() const
+ {
+ assert(m_likelihoodSet);
+ return m_likelihood * m_sourceBlock->bbWeight;
+ }
+
+ unsigned getDupCount() const
+ {
+ return m_dupCount;
+ }
+
+ void incrementDupCount()
+ {
+ m_dupCount++;
+ }
+
+ void decrementDupCount()
+ {
+ assert(m_dupCount >= 1);
+ m_dupCount--;
+ }
+};
// BasicBlock iterator implementations (that are required to be defined after the declaration of FlowEdge)
inline BasicBlock* BBArrayIterator::operator*() const
{
- assert(m_edgeEntry != nullptr);
- FlowEdge* edgeTarget = *m_edgeEntry;
- assert(edgeTarget != nullptr);
- assert(edgeTarget->getDestinationBlock() != nullptr);
- return edgeTarget->getDestinationBlock();
+ if (iterateEdges)
+ {
+ assert(m_edgeEntry != nullptr);
+ FlowEdge* edgeTarget = *m_edgeEntry;
+ assert(edgeTarget != nullptr);
+ assert(edgeTarget->getDestinationBlock() != nullptr);
+ return edgeTarget->getDestinationBlock();
+ }
+
+ assert(m_bbEntry != nullptr);
+ BasicBlock* bTarget = *m_bbEntry;
+ assert(bTarget != nullptr);
+ return bTarget;
}
// Pred list iterator implementations (that are required to be defined after the declaration of BasicBlock and FlowEdge)
@@ -2434,45 +2257,29 @@ inline PredEdgeList::iterator& PredEdgeList::iterator::operator++()
return *this;
}
-template
-inline PredBlockList::iterator::iterator(FlowEdge* pred) : m_pred(pred)
+inline PredBlockList::iterator::iterator(FlowEdge* pred) : m_pred(pred)
{
- bool initNextPointer = allowEdits;
- INDEBUG(initNextPointer = true);
- if (initNextPointer)
- {
- m_next = (m_pred == nullptr) ? nullptr : m_pred->getNextPredEdge();
- }
+#ifdef DEBUG
+ m_next = (m_pred == nullptr) ? nullptr : m_pred->getNextPredEdge();
+#endif
}
-template
-inline BasicBlock* PredBlockList::iterator::operator*() const
+inline BasicBlock* PredBlockList::iterator::operator*() const
{
return m_pred->getSourceBlock();
}
-template
-inline typename PredBlockList::iterator& PredBlockList::iterator::operator++()
+inline PredBlockList::iterator& PredBlockList::iterator::operator++()
{
- if (allowEdits)
- {
- // For editing iterators, m_next is always used and maintained
- m_pred = m_next;
- m_next = (m_next == nullptr) ? nullptr : m_next->getNextPredEdge();
- }
- else
- {
- FlowEdge* next = m_pred->getNextPredEdge();
+ FlowEdge* next = m_pred->getNextPredEdge();
#ifdef DEBUG
- // If allowEdits=false, check that the next block is the one we expect to see.
- assert(next == m_next);
- m_next = (m_next == nullptr) ? nullptr : m_next->getNextPredEdge();
+ // Check that the next block is the one we expect to see.
+ assert(next == m_next);
+ m_next = (next == nullptr) ? nullptr : next->getNextPredEdge();
#endif // DEBUG
- m_pred = next;
- }
-
+ m_pred = next;
return *this;
}
diff --git a/src/coreclr/jit/clrjit.natvis b/src/coreclr/jit/clrjit.natvis
index cfbc6a181e9743..95dd3dc305689b 100644
--- a/src/coreclr/jit/clrjit.natvis
+++ b/src/coreclr/jit/clrjit.natvis
@@ -8,8 +8,9 @@ The .NET Foundation licenses this file to you under the MIT license.
@@ -20,17 +21,12 @@ Documentation for VS debugger format specifiers: https://learn.microsoft.com/en-
- BB{bbNum,d}->BB{bbTargetEdge->m_destBlock->bbNum,d}; {bbKind,en}
+ BB{bbNum,d}->BB{bbTarget->bbNum,d}; {bbKind,en}
BB{bbNum,d}; {bbKind,en}; {bbSwtTargets->bbsCount} cases
BB{bbNum,d}; {bbKind,en}; {bbEhfTargets->bbeCount} succs
BB{bbNum,d}; {bbKind,en}
-
- BB{m_sourceBlock->bbNum,d}->BB{m_destBlock->bbNum,d} ({m_likelihood,g}) (dup {m_dupCount,d})
- BB{m_sourceBlock->bbNum,d}->BB{m_destBlock->bbNum,d} ({m_likelihood,g})
-
-
REMOVED
[BB{lpTop->bbNum,d}..BB{lpBottom->bbNum,d}] pre-h:BB{lpHead->bbNum,d} e:BB{lpEntry->bbNum,d} {lpFlags,en}
@@ -90,11 +86,6 @@ Documentation for VS debugger format specifiers: https://learn.microsoft.com/en-
{gtTreeID, d}: [{gtOper,en}, {gtType,en} V{((GenTreeLclFld*)this)->_gtLclNum,u}[+{((GenTreeLclFld*)this)->m_lclOffs,u}]]
-
-
- [{Oper,en}, {Type,en}]
-
-
LinearScan
@@ -178,7 +169,6 @@ Documentation for VS debugger format specifiers: https://learn.microsoft.com/en-
- [U{this->relatedInterval->varNum,d}, #{this->intervalIndex, d}, reg={(regNumber)physReg, en}]
[V{this->varNum,d}, #{this->intervalIndex, d}, reg={(regNumber)physReg, en}]
[C{this->intervalIndex, d}, reg={(regNumber)physReg, en}]
[I{this->intervalIndex, d}, reg={(regNumber)physReg, en}]
diff --git a/src/coreclr/jit/codegen.h b/src/coreclr/jit/codegen.h
index 7a2359c9fb5fc9..afd9e42a9d2cdd 100644
--- a/src/coreclr/jit/codegen.h
+++ b/src/coreclr/jit/codegen.h
@@ -1182,10 +1182,10 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
void genCodeForCpObj(GenTreeBlk* cpObjNode);
void genCodeForCpBlkRepMovs(GenTreeBlk* cpBlkNode);
void genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode);
+#ifndef TARGET_X86
+ void genCodeForCpBlkHelper(GenTreeBlk* cpBlkNode);
+#endif
void genCodeForPhysReg(GenTreePhysReg* tree);
-#ifdef SWIFT_SUPPORT
- void genCodeForSwiftErrorReg(GenTree* tree);
-#endif // SWIFT_SUPPORT
void genCodeForNullCheck(GenTreeIndir* tree);
void genCodeForCmpXchg(GenTreeCmpXchg* tree);
void genCodeForReuseVal(GenTree* treeNode);
@@ -1257,10 +1257,12 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
#endif // FEATURE_PUT_STRUCT_ARG_STK
void genCodeForStoreBlk(GenTreeBlk* storeBlkNode);
+#ifndef TARGET_X86
+ void genCodeForInitBlkHelper(GenTreeBlk* initBlkNode);
+#endif
void genCodeForInitBlkLoop(GenTreeBlk* initBlkNode);
void genCodeForInitBlkRepStos(GenTreeBlk* initBlkNode);
void genCodeForInitBlkUnroll(GenTreeBlk* initBlkNode);
- unsigned genEmitJumpTable(GenTree* treeNode, bool relativeAddr);
void genJumpTable(GenTree* tree);
void genTableBasedSwitch(GenTree* tree);
#if defined(TARGET_LOONGARCH64) || defined(TARGET_RISCV64)
diff --git a/src/coreclr/jit/codegenarm.cpp b/src/coreclr/jit/codegenarm.cpp
index 8cf3ac32b3a329..4a8c08a89858e8 100644
--- a/src/coreclr/jit/codegenarm.cpp
+++ b/src/coreclr/jit/codegenarm.cpp
@@ -647,7 +647,29 @@ void CodeGen::genTableBasedSwitch(GenTree* treeNode)
//
void CodeGen::genJumpTable(GenTree* treeNode)
{
- unsigned jmpTabBase = genEmitJumpTable(treeNode, false);
+ noway_assert(compiler->compCurBB->KindIs(BBJ_SWITCH));
+ assert(treeNode->OperGet() == GT_JMPTABLE);
+
+ unsigned jumpCount = compiler->compCurBB->GetSwitchTargets()->bbsCount;
+ BasicBlock** jumpTable = compiler->compCurBB->GetSwitchTargets()->bbsDstTab;
+ unsigned jmpTabBase;
+
+ jmpTabBase = GetEmitter()->emitBBTableDataGenBeg(jumpCount, false);
+
+ JITDUMP("\n J_M%03u_DS%02u LABEL DWORD\n", compiler->compMethodID, jmpTabBase);
+
+ for (unsigned i = 0; i < jumpCount; i++)
+ {
+ BasicBlock* target = *jumpTable++;
+ noway_assert(target->HasFlag(BBF_HAS_LABEL));
+
+ JITDUMP(" DD L_M%03u_" FMT_BB "\n", compiler->compMethodID, target->bbNum);
+
+ GetEmitter()->emitDataGenData(i, target);
+ }
+
+ GetEmitter()->emitDataGenEnd();
+
genMov32RelocatableDataLabel(jmpTabBase, treeNode->GetRegNum());
genProduceReg(treeNode);
diff --git a/src/coreclr/jit/codegenarm64.cpp b/src/coreclr/jit/codegenarm64.cpp
index 81370e6413835f..3883b20118ad8a 100644
--- a/src/coreclr/jit/codegenarm64.cpp
+++ b/src/coreclr/jit/codegenarm64.cpp
@@ -3626,7 +3626,7 @@ void CodeGen::genCodeForCpObj(GenTreeBlk* cpObjNode)
unsigned slots = layout->GetSlotCount();
// Temp register(s) used to perform the sequence of loads and stores.
- regNumber tmpReg = cpObjNode->ExtractTempReg(RBM_ALLINT);
+ regNumber tmpReg = cpObjNode->ExtractTempReg();
regNumber tmpReg2 = REG_NA;
assert(genIsValidIntReg(tmpReg));
@@ -3635,7 +3635,7 @@ void CodeGen::genCodeForCpObj(GenTreeBlk* cpObjNode)
if (slots > 1)
{
- tmpReg2 = cpObjNode->ExtractTempReg(RBM_ALLINT);
+ tmpReg2 = cpObjNode->GetSingleTempReg();
assert(tmpReg2 != tmpReg);
assert(genIsValidIntReg(tmpReg2));
assert(tmpReg2 != REG_WRITE_BARRIER_DST_BYREF);
@@ -3682,60 +3682,26 @@ void CodeGen::genCodeForCpObj(GenTreeBlk* cpObjNode)
{
unsigned gcPtrCount = cpObjNode->GetLayout()->GetGCPtrCount();
- // We might also need SIMD regs if we have 4 or more continuous non-gc slots
- // On ARM64, SIMD loads/stores provide 8-byte atomicity guarantees when aligned to 8 bytes.
- regNumber tmpSimdReg1 = REG_NA;
- regNumber tmpSimdReg2 = REG_NA;
- if ((slots >= 4) && compiler->IsBaselineSimdIsaSupported())
- {
- tmpSimdReg1 = cpObjNode->ExtractTempReg(RBM_ALLFLOAT);
- tmpSimdReg2 = cpObjNode->ExtractTempReg(RBM_ALLFLOAT);
- }
-
unsigned i = 0;
while (i < slots)
{
if (!layout->IsGCPtr(i))
{
- // How many continuous non-gc slots do we have?
- unsigned nonGcSlots = 0;
- do
+ // Check if the next slot's type is also TYP_GC_NONE and use ldp/stp
+ if ((i + 1 < slots) && !layout->IsGCPtr(i + 1))
{
- nonGcSlots++;
- i++;
- } while ((i < slots) && !layout->IsGCPtr(i));
-
- const regNumber srcReg = REG_WRITE_BARRIER_SRC_BYREF;
- const regNumber dstReg = REG_WRITE_BARRIER_DST_BYREF;
- while (nonGcSlots > 0)
+ emit->emitIns_R_R_R_I(INS_ldp, EA_8BYTE, tmpReg, tmpReg2, REG_WRITE_BARRIER_SRC_BYREF,
+ 2 * TARGET_POINTER_SIZE, INS_OPTS_POST_INDEX);
+ emit->emitIns_R_R_R_I(INS_stp, EA_8BYTE, tmpReg, tmpReg2, REG_WRITE_BARRIER_DST_BYREF,
+ 2 * TARGET_POINTER_SIZE, INS_OPTS_POST_INDEX);
+ ++i; // extra increment of i, since we are copying two items
+ }
+ else
{
- regNumber tmp1 = tmpReg;
- regNumber tmp2 = tmpReg2;
- emitAttr size = EA_8BYTE;
- insOpts opts = INS_OPTS_POST_INDEX;
-
- // Copy at least two slots at a time
- if (nonGcSlots >= 2)
- {
- // Do 4 slots at a time if SIMD is supported
- if ((nonGcSlots >= 4) && compiler->IsBaselineSimdIsaSupported())
- {
- // We need SIMD temp regs now
- tmp1 = tmpSimdReg1;
- tmp2 = tmpSimdReg2;
- size = EA_16BYTE;
- nonGcSlots -= 2;
- }
- nonGcSlots -= 2;
- emit->emitIns_R_R_R_I(INS_ldp, size, tmp1, tmp2, srcReg, EA_SIZE(size) * 2, opts);
- emit->emitIns_R_R_R_I(INS_stp, size, tmp1, tmp2, dstReg, EA_SIZE(size) * 2, opts);
- }
- else
- {
- nonGcSlots--;
- emit->emitIns_R_R_I(INS_ldr, EA_8BYTE, tmp1, srcReg, EA_SIZE(size), opts);
- emit->emitIns_R_R_I(INS_str, EA_8BYTE, tmp1, dstReg, EA_SIZE(size), opts);
- }
+ emit->emitIns_R_R_I(INS_ldr, EA_8BYTE, tmpReg, REG_WRITE_BARRIER_SRC_BYREF, TARGET_POINTER_SIZE,
+ INS_OPTS_POST_INDEX);
+ emit->emitIns_R_R_I(INS_str, EA_8BYTE, tmpReg, REG_WRITE_BARRIER_DST_BYREF, TARGET_POINTER_SIZE,
+ INS_OPTS_POST_INDEX);
}
}
else
@@ -3743,8 +3709,8 @@ void CodeGen::genCodeForCpObj(GenTreeBlk* cpObjNode)
// In the case of a GC-Pointer we'll call the ByRef write barrier helper
genEmitHelperCall(CORINFO_HELP_ASSIGN_BYREF, 0, EA_PTRSIZE);
gcPtrCount--;
- i++;
}
+ ++i;
}
assert(gcPtrCount == 0);
}
@@ -3784,7 +3750,32 @@ void CodeGen::genTableBasedSwitch(GenTree* treeNode)
// emits the table and an instruction to get the address of the first element
void CodeGen::genJumpTable(GenTree* treeNode)
{
- unsigned jmpTabBase = genEmitJumpTable(treeNode, true);
+ noway_assert(compiler->compCurBB->KindIs(BBJ_SWITCH));
+ assert(treeNode->OperGet() == GT_JMPTABLE);
+
+ unsigned jumpCount = compiler->compCurBB->GetSwitchTargets()->bbsCount;
+ BasicBlock** jumpTable = compiler->compCurBB->GetSwitchTargets()->bbsDstTab;
+ unsigned jmpTabOffs;
+ unsigned jmpTabBase;
+
+ jmpTabBase = GetEmitter()->emitBBTableDataGenBeg(jumpCount, true);
+
+ jmpTabOffs = 0;
+
+ JITDUMP("\n J_M%03u_DS%02u LABEL DWORD\n", compiler->compMethodID, jmpTabBase);
+
+ for (unsigned i = 0; i < jumpCount; i++)
+ {
+ BasicBlock* target = *jumpTable++;
+ noway_assert(target->HasFlag(BBF_HAS_LABEL));
+
+ JITDUMP(" DD L_M%03u_" FMT_BB "\n", compiler->compMethodID, target->bbNum);
+
+ GetEmitter()->emitDataGenData(i, target);
+ };
+
+ GetEmitter()->emitDataGenEnd();
+
// Access to inline data is 'abstracted' by a special type of static member
// (produced by eeFindJitDataOffs) which the emitter recognizes as being a reference
// to constant data, not a real static field.
diff --git a/src/coreclr/jit/codegenarm64test.cpp b/src/coreclr/jit/codegenarm64test.cpp
index 3c02c968fe919d..47cc0fb6bfd194 100644
--- a/src/coreclr/jit/codegenarm64test.cpp
+++ b/src/coreclr/jit/codegenarm64test.cpp
@@ -4581,14 +4581,6 @@ void CodeGen::genArm64EmitterUnitTestsSve()
theEmitter->emitIns_R_R_R(INS_sve_subr, EA_SCALABLE, REG_V2, REG_P0, REG_V13,
INS_OPTS_SCALABLE_S); // SUBR ., /M, ., .
-#ifdef ALL_ARM64_EMITTER_UNIT_TESTS_SVE_UNSUPPORTED
- // IF_SVE_AB_3B
- theEmitter->emitIns_R_R_R(INS_sve_addpt, EA_SCALABLE, REG_V0, REG_P1, REG_V2,
- INS_OPTS_SCALABLE_D); // ADDPT .D, /M, .D, .D
- theEmitter->emitIns_R_R_R(INS_sve_subpt, EA_SCALABLE, REG_V0, REG_P1, REG_V2,
- INS_OPTS_SCALABLE_D); // SUBPT .D, /M, .D, .D
-#endif // ALL_ARM64_EMITTER_UNIT_TESTS_SVE_UNSUPPORTED
-
// IF_SVE_AC_3A
theEmitter->emitIns_R_R_R(INS_sve_sdiv, EA_SCALABLE, REG_V3, REG_P2, REG_V9,
INS_OPTS_SCALABLE_S); // SDIV ., /M, ., .
@@ -4733,54 +4725,6 @@ void CodeGen::genArm64EmitterUnitTestsSve()
theEmitter->emitIns_R_R_R(INS_sve_lsr, EA_SCALABLE, REG_V0, REG_P0, REG_V0, INS_OPTS_SCALABLE_S,
INS_SCALABLE_OPTS_WIDE); // LSR ., /M, ., .D
- // IF_SVE_CE_2A
- theEmitter->emitIns_R_R(INS_sve_pmov, EA_SCALABLE, REG_P2, REG_V12, INS_OPTS_SCALABLE_B,
- INS_SCALABLE_OPTS_TO_PREDICATE); // PMOV .B,
- theEmitter->emitIns_R_R(INS_sve_pmov, EA_SCALABLE, REG_P7, REG_V2, INS_OPTS_SCALABLE_H,
- INS_SCALABLE_OPTS_TO_PREDICATE); // PMOV .H, [0]
-
- // IF_SVE_CE_2B
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_P15, REG_V7, 7, INS_OPTS_SCALABLE_D,
- INS_SCALABLE_OPTS_TO_PREDICATE); // PMOV .D, []
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_P7, REG_V16, 0, INS_OPTS_SCALABLE_D,
- INS_SCALABLE_OPTS_TO_PREDICATE); // PMOV .D, []
-
- // IF_SVE_CE_2C
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_P0, REG_V31, 1, INS_OPTS_SCALABLE_H,
- INS_SCALABLE_OPTS_TO_PREDICATE); // PMOV .H, []
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_V1, REG_P1, 0, INS_OPTS_SCALABLE_H,
- INS_SCALABLE_OPTS_TO_PREDICATE); // PMOV .H, []
-
- // IF_SVE_CE_2D
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_P3, REG_V9, 3, INS_OPTS_SCALABLE_S,
- INS_SCALABLE_OPTS_TO_PREDICATE); // PMOV .S, []
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_P10, REG_V4, 0, INS_OPTS_SCALABLE_S,
- INS_SCALABLE_OPTS_TO_PREDICATE); // PMOV .S, []
-
- // IF_SVE_CF_2A
- theEmitter->emitIns_R_R(INS_sve_pmov, EA_SCALABLE, REG_V11, REG_P12, INS_OPTS_SCALABLE_B,
- INS_SCALABLE_OPTS_TO_VECTOR); // PMOV , .B
- theEmitter->emitIns_R_R(INS_sve_pmov, EA_SCALABLE, REG_V2, REG_P7, INS_OPTS_SCALABLE_S,
- INS_SCALABLE_OPTS_TO_VECTOR); // PMOV [0], .S
-
- // IF_SVE_CF_2B
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_V6, REG_P8, 7, INS_OPTS_SCALABLE_D,
- INS_SCALABLE_OPTS_TO_VECTOR); // PMOV [], .D
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_V9, REG_P7, 0, INS_OPTS_SCALABLE_D,
- INS_SCALABLE_OPTS_TO_VECTOR); // PMOV [], .D
-
- // IF_SVE_CF_2C
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_V8, REG_P4, 1, INS_OPTS_SCALABLE_H,
- INS_SCALABLE_OPTS_TO_VECTOR); // PMOV [], .H
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_V5, REG_P9, 0, INS_OPTS_SCALABLE_H,
- INS_SCALABLE_OPTS_TO_VECTOR); // PMOV [], .H
-
- // IF_SVE_CF_2D
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_V14, REG_P2, 3, INS_OPTS_SCALABLE_S,
- INS_SCALABLE_OPTS_TO_VECTOR); // PMOV [], .S
- theEmitter->emitIns_R_R_I(INS_sve_pmov, EA_SCALABLE, REG_V3, REG_P15, 0, INS_OPTS_SCALABLE_S,
- INS_SCALABLE_OPTS_TO_VECTOR); // PMOV [], .S
-
// IF_SVE_CJ_2A
theEmitter->emitIns_R_R(INS_sve_rev, EA_SCALABLE, REG_P1, REG_P2,
INS_OPTS_SCALABLE_B); // REV ., .
@@ -5157,106 +5101,6 @@ void CodeGen::genArm64EmitterUnitTestsSve()
theEmitter->emitIns_R_R_R(INS_sve_fsubr, EA_SCALABLE, REG_V6, REG_P4, REG_V29,
INS_OPTS_SCALABLE_D); // FSUBR ., /M, ., .
- // IF_SVE_HL_3B
- theEmitter->emitIns_R_R_R(INS_sve_bfadd, EA_SCALABLE, REG_V0, REG_P0, REG_V1,
- INS_OPTS_SCALABLE_H); // BFADD .H, /M, .H, .H
- theEmitter->emitIns_R_R_R(INS_sve_bfmax, EA_SCALABLE, REG_V2, REG_P1, REG_V3,
- INS_OPTS_SCALABLE_H); // BFMAX .H, /M, .H, .H
- theEmitter->emitIns_R_R_R(INS_sve_bfmaxnm, EA_SCALABLE, REG_V4, REG_P2, REG_V5,
- INS_OPTS_SCALABLE_H); // BFMAXNM .H, /M, .H, .H
- theEmitter->emitIns_R_R_R(INS_sve_bfmin, EA_SCALABLE, REG_V6, REG_P3, REG_V7,
- INS_OPTS_SCALABLE_H); // BFMIN .H, /M, .H, .H
- theEmitter->emitIns_R_R_R(INS_sve_bfminnm, EA_SCALABLE, REG_V8, REG_P4, REG_V9,
- INS_OPTS_SCALABLE_H); // BFMINNM .H, /M, .H, .H
- theEmitter->emitIns_R_R_R(INS_sve_bfmul, EA_SCALABLE, REG_V10, REG_P5, REG_V11,
- INS_OPTS_SCALABLE_H); // BFMUL .H, /M, .H, .H
- theEmitter->emitIns_R_R_R(INS_sve_bfsub, EA_SCALABLE, REG_V12, REG_P6, REG_V13,
- INS_OPTS_SCALABLE_H); // BFSUB .H, /M, .H, .H
-
- // IF_SVE_HO_3A
- theEmitter->emitIns_R_R_R(INS_sve_bfcvt, EA_SCALABLE, REG_V3, REG_P2, REG_V9,
- INS_OPTS_S_TO_H); // BFCVT .H, /M, .S
-
- // IF_SVE_HO_3B
- theEmitter->emitIns_R_R_R(INS_sve_fcvt, EA_SCALABLE, REG_V7, REG_P7, REG_V1,
- INS_OPTS_S_TO_D); // FCVT .D, /M, .S
- theEmitter->emitIns_R_R_R(INS_sve_fcvt, EA_SCALABLE, REG_V29, REG_P3, REG_V12,
- INS_OPTS_D_TO_S); // FCVT .S, /M, .D
- theEmitter->emitIns_R_R_R(INS_sve_fcvt, EA_SCALABLE, REG_V0, REG_P4, REG_V13,
- INS_OPTS_D_TO_H); // FCVT .H, /M,