diff --git a/src/ci/azure-pipelines/steps/install-clang.yml b/src/ci/azure-pipelines/steps/install-clang.yml deleted file mode 100644 index 14daf81b43075..0000000000000 --- a/src/ci/azure-pipelines/steps/install-clang.yml +++ /dev/null @@ -1,46 +0,0 @@ -steps: - -- bash: | - set -e - curl -f http://releases.llvm.org/7.0.0/clang+llvm-7.0.0-x86_64-apple-darwin.tar.xz | tar xJf - - - export CC=`pwd`/clang+llvm-7.0.0-x86_64-apple-darwin/bin/clang - echo "##vso[task.setvariable variable=CC]$CC" - - export CXX=`pwd`/clang+llvm-7.0.0-x86_64-apple-darwin/bin/clang++ - echo "##vso[task.setvariable variable=CXX]$CXX" - - # Configure `AR` specifically so rustbuild doesn't try to infer it as - # `clang-ar` by accident. - echo "##vso[task.setvariable variable=AR]ar" - displayName: Install clang (OSX) - condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin')) - -# If we're compiling for MSVC then we, like most other distribution builders, -# switch to clang as the compiler. This'll allow us eventually to enable LTO -# amongst LLVM and rustc. Note that we only do this on MSVC as I don't think -# clang has an output mode compatible with MinGW that we need. If it does we -# should switch to clang for MinGW as well! -# -# Note that the LLVM installer is an NSIS installer -# -# Original downloaded here came from -# http://releases.llvm.org/7.0.0/LLVM-7.0.0-win64.exe -# That installer was run through `wine` on Linux and then the resulting -# installation directory (found in `$HOME/.wine/drive_c/Program Files/LLVM`) was -# packaged up into a tarball. We've had issues otherwise that the installer will -# randomly hang, provide not a lot of useful information, pollute global state, -# etc. In general the tarball is just more confined and easier to deal with when -# working with various CI environments. -- bash: | - set -e - mkdir -p citools - cd citools - curl -f https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/LLVM-7.0.0-win64.tar.gz | tar xzf - - echo "##vso[task.setvariable variable=RUST_CONFIGURE_ARGS]$RUST_CONFIGURE_ARGS --set llvm.clang-cl=`pwd`/clang-rust/bin/clang-cl.exe" - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'), eq(variables['MINGW_URL'],'')) - displayName: Install clang (Windows) - -# Note that we don't install clang on Linux since its compiler story is just so -# different. Each container has its own toolchain configured appropriately -# already. diff --git a/src/ci/azure-pipelines/steps/install-sccache.yml b/src/ci/azure-pipelines/steps/install-sccache.yml deleted file mode 100644 index d4679c1c6733e..0000000000000 --- a/src/ci/azure-pipelines/steps/install-sccache.yml +++ /dev/null @@ -1,21 +0,0 @@ -steps: - -- bash: | - set -e - curl -fo /usr/local/bin/sccache https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/2018-04-02-sccache-x86_64-apple-darwin - chmod +x /usr/local/bin/sccache - displayName: Install sccache (OSX) - condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin')) - -- script: | - md sccache - powershell -Command "$ProgressPreference = 'SilentlyContinue'; iwr -outf sccache\sccache.exe https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/2018-04-26-sccache-x86_64-pc-windows-msvc" - echo ##vso[task.prependpath]%CD%\sccache - displayName: Install sccache (Windows) - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -# Note that we don't install sccache on Linux since it's installed elsewhere -# through all the containers. 
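The `echo "##vso[...]"` lines in the deleted steps above (and in the shell scripts added later in this diff) are Azure Pipelines logging commands: anything a step prints to stdout in that form is interpreted by the agent rather than treated as ordinary output. A minimal sketch of the two commands this diff relies on follows; the `TOOL_HOME` name and path are only illustrative.

    # Hypothetical example of the two Azure Pipelines logging commands used here.
    # The agent scans stdout for ##vso[...] markers; nothing else about this
    # snippet is specific to rustc's CI.
    TOOL_HOME="$(pwd)/some-tool"    # illustrative path, not a real tool

    # Make a value available to *later* steps in the same job (as $(TOOL_HOME)
    # in YAML and as an environment variable in subsequent scripts).
    echo "##vso[task.setvariable variable=TOOL_HOME]${TOOL_HOME}"

    # Prepend a directory to PATH for later steps.
    echo "##vso[task.prependpath]${TOOL_HOME}/bin"

The new src/ci/shared.sh further down wraps exactly these two commands as ciCommandSetEnv and ciCommandAddPath, so the individual scripts never have to spell out the ##vso syntax themselves.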
-# -# FIXME: we should probably install sccache outside the containers and then -# mount it inside the containers so we can centralize all installation here. diff --git a/src/ci/azure-pipelines/steps/install-windows-build-deps.yml b/src/ci/azure-pipelines/steps/install-windows-build-deps.yml deleted file mode 100644 index bd4f1ed0cea43..0000000000000 --- a/src/ci/azure-pipelines/steps/install-windows-build-deps.yml +++ /dev/null @@ -1,120 +0,0 @@ -steps: -# We use the WIX toolset to create combined installers for Windows, and these -# binaries are downloaded from -# https://github.com/wixtoolset/wix3 originally -- bash: | - set -e - curl -O https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/wix311-binaries.zip - echo "##vso[task.setvariable variable=WIX]`pwd`/wix" - mkdir -p wix/bin - cd wix/bin - 7z x ../../wix311-binaries.zip - displayName: Install wix - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -# We use InnoSetup and its `iscc` program to also create combined installers. -# Honestly at this point WIX above and `iscc` are just holdovers from -# oh-so-long-ago and are required for creating installers on Windows. I think -# one is MSI installers and one is EXE, but they're not used so frequently at -# this point anyway so perhaps it's a wash! -- script: | - echo ##vso[task.prependpath]C:\Program Files (x86)\Inno Setup 5 - curl.exe -o is-install.exe https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/2017-08-22-is.exe - is-install.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /SP- - displayName: Install InnoSetup - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -# We've had issues with the default drive in use running out of space during a -# build, and it looks like the `C:` drive has more space than the default `D:` -# drive. We should probably confirm this with the azure pipelines team at some -# point, but this seems to fix our "disk space full" problems. -- script: | - mkdir c:\MORE_SPACE - mklink /J build c:\MORE_SPACE - displayName: "Ensure build happens on C:/ instead of D:/" - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -- bash: git config --replace-all --global core.autocrlf false - displayName: "Disable git automatic line ending conversion (on C:/)" - -# Download and install MSYS2, needed primarily for the test suite (run-make) but -# also used by the MinGW toolchain for assembling things. -# -# FIXME: we should probe the default azure image and see if we can use the MSYS2 -# toolchain there. (if there's even one there). For now though this gets the job -# done. -- bash: | - set -e - choco install msys2 --params="/InstallDir:$(System.Workfolder)/msys2 /NoPath" -y --no-progress - echo "##vso[task.prependpath]$(System.Workfolder)/msys2/usr/bin" - mkdir -p "$(System.Workfolder)/msys2/home/$USERNAME" - displayName: Install msys2 - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -- bash: pacman -S --noconfirm --needed base-devel ca-certificates make diffutils tar - displayName: Install msys2 base deps - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -# If we need to download a custom MinGW, do so here and set the path -# appropriately. -# -# Here we also do a pretty heinous thing which is to mangle the MinGW -# installation we just downloaded. Currently, as of this writing, we're using -# MinGW-w64 builds of gcc, and that's currently at 6.3.0. 
We use 6.3.0 as it -# appears to be the first version which contains a fix for #40546, builds -# randomly failing during LLVM due to ar.exe/ranlib.exe failures. -# -# Unfortunately, though, 6.3.0 *also* is the first version of MinGW-w64 builds -# to contain a regression in gdb (#40184). As a result if we were to use the -# gdb provided (7.11.1) then we would fail all debuginfo tests. -# -# In order to fix spurious failures (pretty high priority) we use 6.3.0. To -# avoid disabling gdb tests we download an *old* version of gdb, specifically -# that found inside the 6.2.0 distribution. We then overwrite the 6.3.0 gdb -# with the 6.2.0 gdb to get tests passing. -# -# Note that we don't literally overwrite the gdb.exe binary because it appears -# to just use gdborig.exe, so that's the binary we deal with instead. -- bash: | - set -e - curl -o mingw.7z $MINGW_URL/$MINGW_ARCHIVE - 7z x -y mingw.7z > /dev/null - curl -o $MINGW_DIR/bin/gdborig.exe $MINGW_URL/2017-04-20-${MSYS_BITS}bit-gdborig.exe - echo "##vso[task.prependpath]`pwd`/$MINGW_DIR/bin" - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'), ne(variables['MINGW_URL'],'')) - displayName: Download custom MinGW - -# Otherwise install MinGW through `pacman` -- bash: | - set -e - arch=i686 - if [ "$MSYS_BITS" = "64" ]; then - arch=x86_64 - fi - pacman -S --noconfirm --needed mingw-w64-$arch-toolchain mingw-w64-$arch-cmake mingw-w64-$arch-gcc mingw-w64-$arch-python2 - echo "##vso[task.prependpath]$(System.Workfolder)/msys2/mingw$MSYS_BITS/bin" - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'), eq(variables['MINGW_URL'],'')) - displayName: Download standard MinGW - -# Make sure we use the native python interpreter instead of some msys equivalent -# one way or another. The msys interpreters seem to have weird path conversions -# baked in which break LLVM's build system one way or another, so let's use the -# native version which keeps everything as native as possible. -- bash: | - set -e - cp C:/Python27amd64/python.exe C:/Python27amd64/python2.7.exe - echo "##vso[task.prependpath]C:/Python27amd64" - displayName: Prefer the "native" Python as LLVM has trouble building with MSYS sometimes - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - -# Note that this is originally from the github releases patch of Ninja -- bash: | - set -e - mkdir ninja - curl -o ninja.zip https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/2017-03-15-ninja-win.zip - 7z x -oninja ninja.zip - rm ninja.zip - echo "##vso[task.setvariable variable=RUST_CONFIGURE_ARGS]$RUST_CONFIGURE_ARGS --enable-ninja" - echo "##vso[task.prependpath]`pwd`/ninja" - displayName: Download and install ninja - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) diff --git a/src/ci/azure-pipelines/steps/run.yml b/src/ci/azure-pipelines/steps/run.yml index 15a2499e4609e..1f8ef40ec21ab 100644 --- a/src/ci/azure-pipelines/steps/run.yml +++ b/src/ci/azure-pipelines/steps/run.yml @@ -48,86 +48,99 @@ steps: - bash: python src/ci/cpu-usage-over-time.py &> cpu-usage.csv & displayName: "Collect CPU-usage statistics in the background" -- bash: printenv | sort - displayName: Show environment variables +- bash: src/ci/scripts/dump-environment.sh + displayName: Show the current environment -- bash: | - set -e - df -h - du . | sort -nr | head -n100 - displayName: Show disk usage - # FIXME: this hasn't been tested, but maybe it works on Windows? Should test! 
- condition: and(succeeded(), ne(variables['Agent.OS'], 'Windows_NT')) - -- template: install-sccache.yml -- template: install-clang.yml - -# Switch to XCode 9.3 on OSX since it seems to be the last version that supports -# i686-apple-darwin. We'll eventually want to upgrade this and it will probably -# force us to drop i686-apple-darwin, but let's keep the wheels turning for now. -- bash: | - set -e - sudo xcode-select --switch /Applications/Xcode_9.3.app - displayName: Switch to Xcode 9.3 (OSX) - condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin')) +- bash: src/ci/scripts/install-sccache.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Install sccache + condition: and(succeeded(), not(variables.SKIP_JOB)) -- template: install-windows-build-deps.yml +- bash: src/ci/scripts/install-clang.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Install clang + condition: and(succeeded(), not(variables.SKIP_JOB)) -# Looks like docker containers have IPv6 disabled by default, so let's turn it -# on since libstd tests require it -- bash: | - set -e - sudo mkdir -p /etc/docker - echo '{"ipv6":true,"fixed-cidr-v6":"fd9a:8454:6789:13f7::/64"}' | sudo tee /etc/docker/daemon.json - sudo service docker restart - displayName: Enable IPv6 - condition: and(succeeded(), not(variables.SKIP_JOB), eq(variables['Agent.OS'], 'Linux')) +- bash: src/ci/scripts/switch-xcode.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Switch to Xcode 9.3 + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/install-wix.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Install wix + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/install-innosetup.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Install InnoSetup + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/windows-symlink-build-dir.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Ensure the build happens on C:\ instead of D:\ + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/disable-git-crlf-conversion.sh + displayName: "Disable git automatic line ending conversion (on C:/)" + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/install-msys2.sh + env: + AGENT_OS: $(Agent.OS) + SYSTEM_WORKFOLDER: $(System.Workfolder) + displayName: Install msys2 + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/install-mingw.sh + env: + AGENT_OS: $(Agent.OS) + SYSTEM_WORKFOLDER: $(System.Workfolder) + displayName: Install MinGW + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/install-ninja.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Install ninja + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/enable-docker-ipv6.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Enable IPv6 on Docker + condition: and(succeeded(), not(variables.SKIP_JOB)) # Disable automatic line ending conversion (again). On Windows, when we're # installing dependencies, something switches the git configuration directory or # re-enables autocrlf. We've not tracked down the exact cause -- and there may # be multiple -- but this should ensure submodules are checked out with the # appropriate line endings. 
-- bash: git config --replace-all --global core.autocrlf false - displayName: "Disable git automatic line ending conversion" +- bash: src/ci/scripts/disable-git-crlf-conversion.sh + displayName: Disable git automatic line ending conversion + condition: and(succeeded(), not(variables.SKIP_JOB)) -# Check out all our submodules, but more quickly than using git by using one of -# our custom scripts -- bash: | - set -e - mkdir -p $HOME/rustsrc - $BUILD_SOURCESDIRECTORY/src/ci/init_repo.sh . $HOME/rustsrc - condition: and(succeeded(), not(variables.SKIP_JOB), ne(variables['Agent.OS'], 'Windows_NT')) - displayName: Check out submodules (Unix) -- script: | - if not exist C:\cache\rustsrc\NUL mkdir C:\cache\rustsrc - sh src/ci/init_repo.sh . /c/cache/rustsrc - condition: and(succeeded(), not(variables.SKIP_JOB), eq(variables['Agent.OS'], 'Windows_NT')) - displayName: Check out submodules (Windows) - -# See also the disable for autocrlf above, this just checks that it worked -# -# We check both in rust-lang/rust and in a submodule to make sure both are -# accurate. Submodules are checked out significantly later than the main -# repository in this script, so settings can (and do!) change between then. -# -# Linux (and maybe macOS) builders don't currently have dos2unix so just only -# run this step on Windows. -- bash: | - set -x - # print out the git configuration so we can better investigate failures in - # the following - git config --list --show-origin - dos2unix -ih Cargo.lock src/tools/rust-installer/install-template.sh - endings=$(dos2unix -ic Cargo.lock src/tools/rust-installer/install-template.sh) - # if endings has non-zero length, error out - if [ -n "$endings" ]; then exit 1 ; fi - condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) - displayName: Verify line endings are LF +- bash: src/ci/scripts/checkout-submodules.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Checkout submodules + condition: and(succeeded(), not(variables.SKIP_JOB)) + +- bash: src/ci/scripts/verify-line-endings.sh + env: + AGENT_OS: $(Agent.OS) + displayName: Verify line endings + condition: and(succeeded(), not(variables.SKIP_JOB)) # Ensure the `aws` CLI is installed so we can deploy later on, cache docker # images, etc. -- bash: src/ci/install-awscli.sh +- bash: src/ci/scripts/install-awscli.sh env: AGENT_OS: $(Agent.OS) condition: and(succeeded(), not(variables.SKIP_JOB)) diff --git a/src/ci/run.sh b/src/ci/run.sh index 0d5ea371245e4..bce35670c8d46 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -117,7 +117,7 @@ make check-bootstrap # Display the CPU and memory information. This helps us know why the CI timing # is fluctuating. -if isOSX; then +if isMacOS; then system_profiler SPHardwareDataType || true sysctl hw || true ncpus=$(sysctl -n hw.ncpu) diff --git a/src/ci/scripts/checkout-submodules.sh b/src/ci/scripts/checkout-submodules.sh new file mode 100755 index 0000000000000..0b44ea3c90bc9 --- /dev/null +++ b/src/ci/scripts/checkout-submodules.sh @@ -0,0 +1,17 @@ +#!/bin/bash +# Check out all our submodules, but more quickly than using git by using one of +# our custom scripts + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + path="/c/cache/rustsrc" +else + path="${HOME}/rustsrc" +fi + +mkdir -p "${path}" +"$(cd "$(dirname "$0")" && pwd)/../init_repo.sh" . 
"${path}" diff --git a/src/ci/scripts/disable-git-crlf-conversion.sh b/src/ci/scripts/disable-git-crlf-conversion.sh new file mode 100755 index 0000000000000..836145fbb8e60 --- /dev/null +++ b/src/ci/scripts/disable-git-crlf-conversion.sh @@ -0,0 +1,13 @@ +#!/bin/bash +# Disable automatic line ending conversion, which is enabled by default on +# Azure's Windows image. Having the conversion enabled caused regressions both +# in our test suite (it broke miri tests) and in the ecosystem, since we +# started shipping install scripts with CRLF endings instead of the old LF. +# +# Note that we do this a couple times during the build as the PATH and current +# user/directory change, e.g. when mingw is enabled. + +set -euo pipefail +IFS=$'\n\t' + +git config --replace-all --global core.autocrlf false diff --git a/src/ci/scripts/dump-environment.sh b/src/ci/scripts/dump-environment.sh new file mode 100755 index 0000000000000..c6774b52ab92d --- /dev/null +++ b/src/ci/scripts/dump-environment.sh @@ -0,0 +1,19 @@ +#!/bin/bash +# This script dumps information about the build environment to stdout. + +set -euo pipefail +IFS=$'\n\t' + +echo "environment variables:" +printenv | sort +echo + +echo "disk usage:" +df -h +echo + +echo "biggest files in the working dir:" +set +o pipefail +du . | sort -nr | head -n100 +set -o pipefail +echo diff --git a/src/ci/scripts/enable-docker-ipv6.sh b/src/ci/scripts/enable-docker-ipv6.sh new file mode 100755 index 0000000000000..03d5a75e24e27 --- /dev/null +++ b/src/ci/scripts/enable-docker-ipv6.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# Looks like docker containers have IPv6 disabled by default, so let's turn it +# on since libstd tests require it + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isLinux; then + sudo mkdir -p /etc/docker + echo '{"ipv6":true,"fixed-cidr-v6":"fd9a:8454:6789:13f7::/64"}' \ + | sudo tee /etc/docker/daemon.json + sudo service docker restart +fi diff --git a/src/ci/install-awscli.sh b/src/ci/scripts/install-awscli.sh similarity index 88% rename from src/ci/install-awscli.sh rename to src/ci/scripts/install-awscli.sh index 69c8d2e3099ab..e21187938504c 100755 --- a/src/ci/install-awscli.sh +++ b/src/ci/scripts/install-awscli.sh @@ -16,12 +16,14 @@ set -euo pipefail IFS=$'\n\t' -MIRROR="https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc/2019-07-27-awscli.tar" +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +MIRROR="${MIRRORS_BASE}/2019-07-27-awscli.tar" DEPS_DIR="/tmp/awscli-deps" pip="pip" pipflags="" -if [[ "${AGENT_OS}" == "Linux" ]]; then +if isLinux; then pip="pip3" pipflags="--user" diff --git a/src/ci/scripts/install-clang.sh b/src/ci/scripts/install-clang.sh new file mode 100755 index 0000000000000..ccb6a0bd9dc16 --- /dev/null +++ b/src/ci/scripts/install-clang.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# This script installs clang on the local machine. Note that we don't install +# clang on Linux since its compiler story is just so different. Each container +# has its own toolchain configured appropriately already. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isMacOS; then + curl -f "${MIRRORS_BASE}/clang%2Bllvm-7.0.0-x86_64-apple-darwin.tar.xz" | tar xJf - + + ciCommandSetEnv CC "$(pwd)/clang+llvm-7.0.0-x86_64-apple-darwin/bin/clang" + ciCommandSetEnv CXX "$(pwd)/clang+llvm-7.0.0-x86_64-apple-darwin/bin/clang++" + + # Configure `AR` specifically so rustbuild doesn't try to infer it as + # `clang-ar` by accident. 
+ ciCommandSetEnv AR "ar" +elif isWindows; then + # If we're compiling for MSVC then we, like most other distribution builders, + # switch to clang as the compiler. This'll allow us eventually to enable LTO + # amongst LLVM and rustc. Note that we only do this on MSVC as I don't think + # clang has an output mode compatible with MinGW that we need. If it does we + # should switch to clang for MinGW as well! + # + # Note that the LLVM installer is an NSIS installer + # + # Original downloaded here came from + # http://releases.llvm.org/7.0.0/LLVM-7.0.0-win64.exe + # That installer was run through `wine` on Linux and then the resulting + # installation directory (found in `$HOME/.wine/drive_c/Program Files/LLVM`) was + # packaged up into a tarball. We've had issues otherwise that the installer will + # randomly hang, provide not a lot of useful information, pollute global state, + # etc. In general the tarball is just more confined and easier to deal with when + # working with various CI environments. + + mkdir -p citools + cd citools + curl -f "${MIRRORS_BASE}/LLVM-7.0.0-win64.tar.gz" | tar xzf - + ciCommandSetEnv RUST_CONFIGURE_ARGS \ + "${RUST_CONFIGURE_ARGS} --set llvm.clang-cl=$(pwd)/clang-rust/bin/clang-cl.exe" +fi diff --git a/src/ci/scripts/install-innosetup.sh b/src/ci/scripts/install-innosetup.sh new file mode 100755 index 0000000000000..04ca249777a11 --- /dev/null +++ b/src/ci/scripts/install-innosetup.sh @@ -0,0 +1,18 @@ +#!/bin/bash +# We use InnoSetup and its `iscc` program to also create combined installers. +# Honestly at this point WIX above and `iscc` are just holdovers from +# oh-so-long-ago and are required for creating installers on Windows. I think +# one is MSI installers and one is EXE, but they're not used so frequently at +# this point anyway so perhaps it's a wash! + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + curl.exe -o is-install.exe "${MIRRORS_BASE}/2017-08-22-is.exe" + cmd.exe //c "is-install.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /SP-" + + ciCommandAddPath "C:\\Program Files (x86)\\Inno Setup 5" +fi diff --git a/src/ci/scripts/install-mingw.sh b/src/ci/scripts/install-mingw.sh new file mode 100755 index 0000000000000..5a1c19b8981ec --- /dev/null +++ b/src/ci/scripts/install-mingw.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# If we need to download a custom MinGW, do so here and set the path +# appropriately. +# +# Here we also do a pretty heinous thing which is to mangle the MinGW +# installation we just downloaded. Currently, as of this writing, we're using +# MinGW-w64 builds of gcc, and that's currently at 6.3.0. We use 6.3.0 as it +# appears to be the first version which contains a fix for #40546, builds +# randomly failing during LLVM due to ar.exe/ranlib.exe failures. +# +# Unfortunately, though, 6.3.0 *also* is the first version of MinGW-w64 builds +# to contain a regression in gdb (#40184). As a result if we were to use the +# gdb provided (7.11.1) then we would fail all debuginfo tests. +# +# In order to fix spurious failures (pretty high priority) we use 6.3.0. To +# avoid disabling gdb tests we download an *old* version of gdb, specifically +# that found inside the 6.2.0 distribution. We then overwrite the 6.3.0 gdb +# with the 6.2.0 gdb to get tests passing. +# +# Note that we don't literally overwrite the gdb.exe binary because it appears +# to just use gdborig.exe, so that's the binary we deal with instead. 
+# +# Otherwise install MinGW through `pacman` + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + if [[ -z "${MINGW_URL+x}" ]]; then + curl -o mingw.7z "${MINGW_URL}/${MINGW_ARCHIVE}" + 7z x -y mingw.7z > /dev/null + curl -o "${MINGW_DIR}/bin/gdborig.exe" "${MINGW_URL}/2017-04-20-${MSYS_BITS}bit-gdborig.exe" + ciCommandAddPath "$(pwd)/${MINGW_DIR}/bin" + else + arch=i686 + if [ "$MSYS_BITS" = "64" ]; then + arch=x86_64 + fi + pacman -S --noconfirm --needed mingw-w64-$arch-toolchain mingw-w64-$arch-cmake \ + mingw-w64-$arch-gcc mingw-w64-$arch-python2 + ciCommandAddPath "${SYSTEM_WORKFOLDER}/msys2/mingw${MSYS_BITS}/bin" + fi +fi diff --git a/src/ci/scripts/install-msys2.sh b/src/ci/scripts/install-msys2.sh new file mode 100755 index 0000000000000..8b9a55fb17972 --- /dev/null +++ b/src/ci/scripts/install-msys2.sh @@ -0,0 +1,29 @@ +#!/bin/bash +# Download and install MSYS2, needed primarily for the test suite (run-make) but +# also used by the MinGW toolchain for assembling things. +# +# FIXME: we should probe the default azure image and see if we can use the MSYS2 +# toolchain there. (if there's even one there). For now though this gets the job +# done. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + choco install msys2 --params="/InstallDir:${SYSTEM_WORKFOLDER}/msys2 /NoPath" -y --no-progress + mkdir -p "${SYSTEM_WORKFOLDER}/msys2/home/${USERNAME}" + + ciCommandAddPath "${SYSTEM_WORKFOLDER}/msys2/usr/bin" + export PATH="${SYSTEM_WORKFOLDER}/msys2/usr/bin:${PATH}" + + pacman -S --noconfirm --needed base-devel ca-certificates make diffutils tar + + # Make sure we use the native python interpreter instead of some msys equivalent + # one way or another. The msys interpreters seem to have weird path conversions + # baked in which break LLVM's build system one way or another, so let's use the + # native version which keeps everything as native as possible. + cp C:/Python27amd64/python.exe C:/Python27amd64/python2.7.exe + ciCommandAddPath "C:\\Python27amd64" +fi diff --git a/src/ci/scripts/install-ninja.sh b/src/ci/scripts/install-ninja.sh new file mode 100755 index 0000000000000..b8261d8a6f284 --- /dev/null +++ b/src/ci/scripts/install-ninja.sh @@ -0,0 +1,16 @@ +#!/bin/bash +# Note that this is originally from the github releases patch of Ninja + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + mkdir ninja + curl -o ninja.zip "${MIRRORS_BASE}/2017-03-15-ninja-win.zip" + 7z x -oninja ninja.zip + rm ninja.zip + ciCommandSetEnv "RUST_CONFIGURE_ARGS" "${RUST_CONFIGURE_ARGS} --enable-ninja" + ciCommandAddPath "$(pwd)/ninja" +fi diff --git a/src/ci/scripts/install-sccache.sh b/src/ci/scripts/install-sccache.sh new file mode 100755 index 0000000000000..d3c298992254e --- /dev/null +++ b/src/ci/scripts/install-sccache.sh @@ -0,0 +1,20 @@ +#!/bin/bash +# This script installs sccache on the local machine. Note that we don't install +# sccache on Linux since it's installed elsewhere through all the containers. 
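Every script introduced in this diff opens with the same two-line prologue that immediately follows: `set -euo pipefail` plus a narrowed `IFS`, often called bash "strict mode". The sketch below only illustrates what each part buys and is not taken from the repository; it also shows why dump-environment.sh above briefly switches pipefail off around its `du | sort | head` pipeline, presumably because `head -n100` exits early and the earlier stages can then die from SIGPIPE, which would otherwise abort the script.

    # Illustrative only: behaviour of the shared prologue used by these scripts.
    set -euo pipefail     # -e: exit as soon as a command fails
                          # -u: referencing an unset variable is an error
                          # -o pipefail: a pipeline fails if any stage fails
    IFS=$'\n\t'           # word-split only on newlines and tabs, not spaces

    echo "${UNDEFINED:-fallback}"   # fine under -u thanks to the default
    # echo "${UNDEFINED}"           # would abort: unbound variable
    # nonexistent-command           # would abort the script under -e
    # grep x /no/such/file | wc -l  # would abort under pipefail (grep fails)

    # The dump-environment.sh pattern: tolerate SIGPIPE from an early-exiting head.
    set +o pipefail
    du . 2>/dev/null | sort -nr | head -n3
    set -o pipefail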
+ +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isMacOS; then + curl -fo /usr/local/bin/sccache "${MIRRORS_BASE}/2018-04-02-sccache-x86_64-apple-darwin" + chmod +x /usr/local/bin/sccache +elif isWindows; then + mkdir -p sccache + curl -fo sccache/sccache.exe "${MIRRORS_BASE}/2018-04-26-sccache-x86_64-pc-windows-msvc" + ciCommandAddPath "$(pwd)/sccache" +fi + +# FIXME: we should probably install sccache outside the containers and then +# mount it inside the containers so we can centralize all installation here. diff --git a/src/ci/scripts/install-wix.sh b/src/ci/scripts/install-wix.sh new file mode 100755 index 0000000000000..950e19ff6407c --- /dev/null +++ b/src/ci/scripts/install-wix.sh @@ -0,0 +1,17 @@ +#!/bin/bash +# We use the WIX toolset to create combined installers for Windows, and these +# binaries are downloaded from https://github.com/wixtoolset/wix3 originally + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + curl -O "${MIRRORS_BASE}/wix311-binaries.zip" + mkdir -p wix/bin + cd wix/bin + 7z x ../../wix311-binaries.zip + + ciCommandSetEnv WIX "$(pwd)/wix" +fi diff --git a/src/ci/scripts/switch-xcode.sh b/src/ci/scripts/switch-xcode.sh new file mode 100755 index 0000000000000..2cbb2ddbc7046 --- /dev/null +++ b/src/ci/scripts/switch-xcode.sh @@ -0,0 +1,13 @@ +#!/bin/bash +# Switch to XCode 9.3 on OSX since it seems to be the last version that supports +# i686-apple-darwin. We'll eventually want to upgrade this and it will probably +# force us to drop i686-apple-darwin, but let's keep the wheels turning for now. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isMacOS; then + sudo xcode-select --switch /Applications/Xcode_9.3.app +fi diff --git a/src/ci/scripts/verify-line-endings.sh b/src/ci/scripts/verify-line-endings.sh new file mode 100755 index 0000000000000..f3cac13ea4802 --- /dev/null +++ b/src/ci/scripts/verify-line-endings.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# See also the disable for autocrlf, this just checks that it worked. +# +# We check both in rust-lang/rust and in a submodule to make sure both are +# accurate. Submodules are checked out significantly later than the main +# repository in this script, so settings can (and do!) change between then. +# +# Linux (and maybe macOS) builders don't currently have dos2unix so just only +# run this step on Windows. + +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + # print out the git configuration so we can better investigate failures in + # the following + git config --list --show-origin + dos2unix -ih Cargo.lock src/tools/rust-installer/install-template.sh + endings=$(dos2unix -ic Cargo.lock src/tools/rust-installer/install-template.sh) + # if endings has non-zero length, error out + if [ -n "$endings" ]; then exit 1 ; fi +fi diff --git a/src/ci/scripts/windows-symlink-build-dir.sh b/src/ci/scripts/windows-symlink-build-dir.sh new file mode 100755 index 0000000000000..e57128c70f5f1 --- /dev/null +++ b/src/ci/scripts/windows-symlink-build-dir.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# We've had issues with the default drive in use running out of space during a +# build, and it looks like the `C:` drive has more space than the default `D:` +# drive. We should probably confirm this with the azure pipelines team at some +# point, but this seems to fix our "disk space full" problems. 
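The dos2unix flags used by verify-line-endings.sh above are terse, so a short sketch of what the check actually does may help; the file list here simply mirrors the one in the script. Roughly, `-ih` runs dos2unix in information mode and prints a per-file table of DOS/Unix/Mac line-break counts with a header, purely for the build log, while `-ic` prints only the names of files that still contain CRLF endings, so an empty result means everything is LF.

    # Sketch of the CRLF check performed by verify-line-endings.sh.
    dos2unix -ih Cargo.lock src/tools/rust-installer/install-template.sh   # report, for the log
    crlf_files="$(dos2unix -ic Cargo.lock src/tools/rust-installer/install-template.sh)"
    if [ -n "${crlf_files}" ]; then
        echo "files still using CRLF endings:"
        echo "${crlf_files}"
        exit 1
    fi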
+ +set -euo pipefail +IFS=$'\n\t' + +source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" + +if isWindows; then + cmd //c "mkdir c:\\MORE_SPACE" + cmd //c "mklink /J build c:\\MORE_SPACE" +fi diff --git a/src/ci/shared.sh b/src/ci/shared.sh index b093a07ec5c5a..37e45b5639dc9 100644 --- a/src/ci/shared.sh +++ b/src/ci/shared.sh @@ -4,6 +4,8 @@ # `source shared.sh`, hence the invalid shebang and not being # marked as an executable file in git. +export MIRRORS_BASE="https://rust-lang-ci-mirrors.s3-us-west-1.amazonaws.com/rustc" + # See http://unix.stackexchange.com/questions/82598 # Duplicated in docker/dist-various-2/shared.sh function retry { @@ -28,10 +30,39 @@ function isCI { [ "$CI" = "true" ] || [ "$TF_BUILD" = "True" ] } -function isOSX { +function isMacOS { [ "$AGENT_OS" = "Darwin" ] } +function isWindows { + [ "$AGENT_OS" = "Windows_NT" ] +} + +function isLinux { + [ "$AGENT_OS" = "Linux" ] +} + function getCIBranch { echo "$BUILD_SOURCEBRANCHNAME" } + +function ciCommandAddPath { + if [[ $# -ne 1 ]]; then + echo "usage: $0 " + exit 1 + fi + path="$1" + + echo "##vso[task.prependpath]${path}" +} + +function ciCommandSetEnv { + if [[ $# -ne 2 ]]; then + echo "usage: $0 " + exit 1 + fi + name="$1" + value="$2" + + echo "##vso[task.setvariable variable=${name}]${value}" +} diff --git a/src/librustc/infer/resolve.rs b/src/librustc/infer/resolve.rs index 2db18674e2f53..7c3a338366c9a 100644 --- a/src/librustc/infer/resolve.rs +++ b/src/librustc/infer/resolve.rs @@ -1,7 +1,7 @@ use super::{InferCtxt, FixupError, FixupResult, Span}; use super::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use crate::mir::interpret::ConstValue; -use crate::ty::{self, Ty, Const, TyCtxt, TypeFoldable, InferConst, TypeFlags}; +use crate::ty::{self, Ty, Const, TyCtxt, TypeFoldable, InferConst}; use crate::ty::fold::{TypeFolder, TypeVisitor}; /////////////////////////////////////////////////////////////////////////// @@ -29,7 +29,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for OpportunisticVarResolver<'a, 'tcx> { } fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { - if !t.has_infer_types() { + if !t.has_infer_types() && !t.has_infer_consts() { t // micro-optimize -- if there is nothing in this type that this fold affects... } else { let t = self.infcx.shallow_resolve(t); @@ -38,7 +38,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for OpportunisticVarResolver<'a, 'tcx> { } fn fold_const(&mut self, ct: &'tcx Const<'tcx>) -> &'tcx Const<'tcx> { - if !ct.has_type_flags(TypeFlags::HAS_CT_INFER) { + if !ct.has_infer_consts() { ct // micro-optimize -- if there is nothing in this const that this fold affects... } else { let ct = self.infcx.shallow_resolve(ct); diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 302c11f309d90..b8d1a549ecda3 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -905,11 +905,10 @@ pub fn check_unused_or_stable_features(tcx: TyCtxt<'_>) { // Warn if the user has enabled an already-stable lang feature. unnecessary_stable_feature_lint(tcx, span, feature, since); } - if lang_features.contains(&feature) { + if !lang_features.insert(feature) { // Warn if the user enables a lang feature multiple times. 
duplicate_feature_err(tcx.sess, span, feature); } - lang_features.insert(feature); } let declared_lib_features = &tcx.features().declared_lib_features; diff --git a/src/librustc/ty/fold.rs b/src/librustc/ty/fold.rs index 5192075c26e98..a95ed589c3e2a 100644 --- a/src/librustc/ty/fold.rs +++ b/src/librustc/ty/fold.rs @@ -88,6 +88,9 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { fn has_infer_types(&self) -> bool { self.has_type_flags(TypeFlags::HAS_TY_INFER) } + fn has_infer_consts(&self) -> bool { + self.has_type_flags(TypeFlags::HAS_CT_INFER) + } fn has_local_value(&self) -> bool { self.has_type_flags(TypeFlags::KEEP_IN_LOCAL_TCX) } diff --git a/src/librustc_codegen_llvm/debuginfo/metadata.rs b/src/librustc_codegen_llvm/debuginfo/metadata.rs index 438a660b8a867..7bd82ced3c386 100644 --- a/src/librustc_codegen_llvm/debuginfo/metadata.rs +++ b/src/librustc_codegen_llvm/debuginfo/metadata.rs @@ -2069,11 +2069,9 @@ fn set_members_of_composite_type(cx: &CodegenCx<'ll, 'tcx>, { let mut composite_types_completed = debug_context(cx).composite_types_completed.borrow_mut(); - if composite_types_completed.contains(&composite_type_metadata) { + if !composite_types_completed.insert(&composite_type_metadata) { bug!("debuginfo::set_members_of_composite_type() - \ Already completed forward declaration re-encountered."); - } else { - composite_types_completed.insert(composite_type_metadata); } } diff --git a/src/librustc_codegen_ssa/mir/rvalue.rs b/src/librustc_codegen_ssa/mir/rvalue.rs index 27442bb6bff88..7e662ea37dbb4 100644 --- a/src/librustc_codegen_ssa/mir/rvalue.rs +++ b/src/librustc_codegen_ssa/mir/rvalue.rs @@ -556,7 +556,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { ) -> Bx::Value { let is_float = input_ty.is_floating_point(); let is_signed = input_ty.is_signed(); - let is_unit = input_ty.is_unit(); match op { mir::BinOp::Add => if is_float { bx.fadd(lhs, rhs) @@ -594,13 +593,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { mir::BinOp::Shl => common::build_unchecked_lshift(bx, lhs, rhs), mir::BinOp::Shr => common::build_unchecked_rshift(bx, input_ty, lhs, rhs), mir::BinOp::Ne | mir::BinOp::Lt | mir::BinOp::Gt | - mir::BinOp::Eq | mir::BinOp::Le | mir::BinOp::Ge => if is_unit { - bx.cx().const_bool(match op { - mir::BinOp::Ne | mir::BinOp::Lt | mir::BinOp::Gt => false, - mir::BinOp::Eq | mir::BinOp::Le | mir::BinOp::Ge => true, - _ => unreachable!() - }) - } else if is_float { + mir::BinOp::Eq | mir::BinOp::Le | mir::BinOp::Ge => if is_float { bx.fcmp( base::bin_op_to_fcmp_predicate(op.to_hir_binop()), lhs, rhs diff --git a/src/librustc_metadata/native_libs.rs b/src/librustc_metadata/native_libs.rs index 9e4c2685f1162..a58db6a903bb9 100644 --- a/src/librustc_metadata/native_libs.rs +++ b/src/librustc_metadata/native_libs.rs @@ -198,12 +198,10 @@ impl Collector<'tcx> { self.tcx.sess.err(&format!("renaming of the library `{}` was specified, \ however this crate contains no `#[link(...)]` \ attributes referencing this library.", name)); - } else if renames.contains(name) { + } else if !renames.insert(name) { self.tcx.sess.err(&format!("multiple renamings were \ specified for library `{}` .", name)); - } else { - renames.insert(name); } } } diff --git a/src/librustc_mir/borrow_check/conflict_errors.rs b/src/librustc_mir/borrow_check/conflict_errors.rs index 098258994f4e2..4c469a82ac3d6 100644 --- a/src/librustc_mir/borrow_check/conflict_errors.rs +++ b/src/librustc_mir/borrow_check/conflict_errors.rs @@ -78,7 +78,7 @@ impl<'cx, 'tcx> 
MirBorrowckCtxt<'cx, 'tcx> { .last() .unwrap(); - if self.uninitialized_error_reported.contains(&root_place) { + if !self.uninitialized_error_reported.insert(root_place) { debug!( "report_use_of_moved_or_uninitialized place: error about {:?} suppressed", root_place @@ -86,8 +86,6 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { return; } - self.uninitialized_error_reported.insert(root_place); - let item_msg = match self.describe_place_with_options(used_place, IncludingDowncast(true)) { Some(name) => format!("`{}`", name), diff --git a/src/librustc_mir/hair/pattern/_match.rs b/src/librustc_mir/hair/pattern/_match.rs index 1d83b104177e2..dc6d4b27886e4 100644 --- a/src/librustc_mir/hair/pattern/_match.rs +++ b/src/librustc_mir/hair/pattern/_match.rs @@ -189,8 +189,8 @@ use std::ops::RangeInclusive; use std::u128; use std::convert::TryInto; -pub fn expand_pattern<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, pat: Pat<'tcx>) -> &'a Pat<'tcx> { - cx.pattern_arena.alloc(LiteralExpander { tcx: cx.tcx }.fold_pattern(&pat)) +pub fn expand_pattern<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, pat: Pat<'tcx>) -> Pat<'tcx> { + LiteralExpander { tcx: cx.tcx }.fold_pattern(&pat) } struct LiteralExpander<'tcx> { diff --git a/src/librustc_mir/hair/pattern/check_match.rs b/src/librustc_mir/hair/pattern/check_match.rs index 7bc4bf291ee48..77f3768172fb4 100644 --- a/src/librustc_mir/hair/pattern/check_match.rs +++ b/src/librustc_mir/hair/pattern/check_match.rs @@ -154,7 +154,8 @@ impl<'tcx> MatchVisitor<'_, 'tcx> { self.tables ); patcx.include_lint_checks(); - let pattern = expand_pattern(cx, patcx.lower_pattern(&pat)); + let pattern = + cx.pattern_arena.alloc(expand_pattern(cx, patcx.lower_pattern(&pat))) as &_; if !patcx.errors.is_empty() { patcx.report_inlining_errors(pat.span); have_errors = true; @@ -253,8 +254,9 @@ impl<'tcx> MatchVisitor<'_, 'tcx> { patcx.include_lint_checks(); let pattern = patcx.lower_pattern(pat); let pattern_ty = pattern.ty; + let pattern = expand_pattern(cx, pattern); let pats: Matrix<'_, '_> = vec![smallvec![ - expand_pattern(cx, pattern) + &pattern ]].into_iter().collect(); let witnesses = match check_not_useful(cx, pattern_ty, &pats, pat.hir_id) { diff --git a/src/librustc_mir/hair/pattern/mod.rs b/src/librustc_mir/hair/pattern/mod.rs index 7e17162dfb3ef..58480912929b3 100644 --- a/src/librustc_mir/hair/pattern/mod.rs +++ b/src/librustc_mir/hair/pattern/mod.rs @@ -1214,7 +1214,7 @@ fn search_for_adt_without_structural_match<'tcx>(tcx: TyCtxt<'tcx>, // tracks ADT's previously encountered during search, so that // we will not recur on them again. - seen: FxHashSet<&'tcx AdtDef>, + seen: FxHashSet, } impl<'tcx> TypeVisitor<'tcx> for Search<'tcx> { @@ -1254,14 +1254,12 @@ fn search_for_adt_without_structural_match<'tcx>(tcx: TyCtxt<'tcx>, return true // Halt visiting! 
} - if self.seen.contains(adt_def) { + if !self.seen.insert(adt_def.did) { debug!("Search already seen adt_def: {:?}", adt_def); // let caller continue its search return false; } - self.seen.insert(adt_def); - // `#[structural_match]` does not care about the // instantiation of the generics in an ADT (it // instead looks directly at its fields outside diff --git a/src/librustc_mir/lints.rs b/src/librustc_mir/lints.rs index da3fead1f9dd7..158b730b9bd43 100644 --- a/src/librustc_mir/lints.rs +++ b/src/librustc_mir/lints.rs @@ -72,13 +72,11 @@ fn check_fn_for_unconditional_recursion( let caller_substs = &InternalSubsts::identity_for_item(tcx, def_id)[..trait_substs_count]; while let Some(bb) = reachable_without_self_call_queue.pop() { - if visited.contains(bb) { + if !visited.insert(bb) { //already done continue; } - visited.insert(bb); - let block = &basic_blocks[bb]; if let Some(ref terminator) = block.terminator { diff --git a/src/librustc_resolve/resolve_imports.rs b/src/librustc_resolve/resolve_imports.rs index 424bf31a78505..34edd5eaf4fc7 100644 --- a/src/librustc_resolve/resolve_imports.rs +++ b/src/librustc_resolve/resolve_imports.rs @@ -673,13 +673,12 @@ impl<'a, 'b> ImportResolver<'a, 'b> { self.throw_unresolved_import_error(errors, None); errors = vec![]; } - if !seen_spans.contains(&err.span) { + if seen_spans.insert(err.span) { let path = import_path_to_string( &import.module_path.iter().map(|seg| seg.ident).collect::>(), &import.subclass, err.span, ); - seen_spans.insert(err.span); errors.push((path, err)); prev_root_id = import.root_id; } diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs index 3a89cddda2362..bfccb032458f5 100644 --- a/src/librustc_typeck/check/coercion.rs +++ b/src/librustc_typeck/check/coercion.rs @@ -811,7 +811,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { target: Ty<'tcx>, allow_two_phase: AllowTwoPhase, ) -> RelateResult<'tcx, Ty<'tcx>> { - let source = self.resolve_type_vars_with_obligations(expr_ty); + let source = self.resolve_vars_with_obligations(expr_ty); debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target); let cause = self.cause(expr.span, ObligationCauseCode::ExprAssignable); @@ -829,7 +829,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Same as `try_coerce()`, but without side-effects. 
pub fn can_coerce(&self, expr_ty: Ty<'tcx>, target: Ty<'tcx>) -> bool { - let source = self.resolve_type_vars_with_obligations(expr_ty); + let source = self.resolve_vars_with_obligations(expr_ty); debug!("coercion::can({:?} -> {:?})", source, target); let cause = self.cause(syntax_pos::DUMMY_SP, ObligationCauseCode::ExprAssignable); @@ -853,8 +853,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { -> RelateResult<'tcx, Ty<'tcx>> where E: AsCoercionSite { - let prev_ty = self.resolve_type_vars_with_obligations(prev_ty); - let new_ty = self.resolve_type_vars_with_obligations(new_ty); + let prev_ty = self.resolve_vars_with_obligations(prev_ty); + let new_ty = self.resolve_vars_with_obligations(new_ty); debug!("coercion::try_find_coercion_lub({:?}, {:?})", prev_ty, new_ty); // Special-case that coercion alone cannot handle: @@ -1333,7 +1333,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { err.span_label(return_sp, "expected because this return type..."); err.span_label( *sp, format!( "...is found to be `{}` here", - fcx.resolve_type_vars_with_obligations(expected), + fcx.resolve_vars_with_obligations(expected), )); } err diff --git a/src/librustc_typeck/check/demand.rs b/src/librustc_typeck/check/demand.rs index 677e2ea356628..3509d6566ec93 100644 --- a/src/librustc_typeck/check/demand.rs +++ b/src/librustc_typeck/check/demand.rs @@ -108,7 +108,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expected: Ty<'tcx>, allow_two_phase: AllowTwoPhase) -> (Ty<'tcx>, Option>) { - let expected = self.resolve_type_vars_with_obligations(expected); + let expected = self.resolve_vars_with_obligations(expected); let e = match self.try_coerce(expr, checked_ty, expected, allow_two_phase) { Ok(ty) => return (ty, None), @@ -117,7 +117,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let expr = expr.peel_drop_temps(); let cause = self.misc(expr.span); - let expr_ty = self.resolve_type_vars_with_obligations(checked_ty); + let expr_ty = self.resolve_vars_with_obligations(checked_ty); let mut err = self.report_mismatched_types(&cause, expected, expr_ty, e); if self.is_assign_to_bool(expr, expected) { diff --git a/src/librustc_typeck/check/expr.rs b/src/librustc_typeck/check/expr.rs index ad46a443b8ffa..f5f85bbcb100c 100644 --- a/src/librustc_typeck/check/expr.rs +++ b/src/librustc_typeck/check/expr.rs @@ -1010,7 +1010,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expr: &'tcx hir::Expr, ) -> Ty<'tcx> { let flds = expected.only_has_type(self).and_then(|ty| { - let ty = self.resolve_type_vars_with_obligations(ty); + let ty = self.resolve_vars_with_obligations(ty); match ty.kind { ty::Tuple(ref flds) => Some(&flds[..]), _ => None diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index f2d001eadedde..d90ed2a790bb6 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -919,7 +919,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // This occurs for UFCS desugaring of `T::method`, where there is no // receiver expression for the method call, and thus no autoderef. 
if let SelfSource::QPath(_) = source { - return is_local(self.resolve_type_vars_with_obligations(rcvr_ty)); + return is_local(self.resolve_vars_with_obligations(rcvr_ty)); } self.autoderef(span, rcvr_ty).any(|(ty, _)| is_local(ty)) diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 152edf8dd0e5a..73f35dde4f6eb 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -2440,23 +2440,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.cause(span, ObligationCauseCode::MiscObligation) } - /// Resolves type variables in `ty` if possible. Unlike the infcx + /// Resolves type and const variables in `ty` if possible. Unlike the infcx /// version (resolve_vars_if_possible), this version will /// also select obligations if it seems useful, in an effort /// to get more type information. - fn resolve_type_vars_with_obligations(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> { - debug!("resolve_type_vars_with_obligations(ty={:?})", ty); + fn resolve_vars_with_obligations(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> { + debug!("resolve_vars_with_obligations(ty={:?})", ty); // No Infer()? Nothing needs doing. - if !ty.has_infer_types() { - debug!("resolve_type_vars_with_obligations: ty={:?}", ty); + if !ty.has_infer_types() && !ty.has_infer_consts() { + debug!("resolve_vars_with_obligations: ty={:?}", ty); return ty; } // If `ty` is a type variable, see whether we already know what it is. ty = self.resolve_vars_if_possible(&ty); - if !ty.has_infer_types() { - debug!("resolve_type_vars_with_obligations: ty={:?}", ty); + if !ty.has_infer_types() && !ty.has_infer_consts() { + debug!("resolve_vars_with_obligations: ty={:?}", ty); return ty; } @@ -2467,7 +2467,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.select_obligations_where_possible(false, |_| {}); ty = self.resolve_vars_if_possible(&ty); - debug!("resolve_type_vars_with_obligations: ty={:?}", ty); + debug!("resolve_vars_with_obligations: ty={:?}", ty); ty } @@ -3668,7 +3668,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { formal_ret: Ty<'tcx>, formal_args: &[Ty<'tcx>]) -> Vec> { - let formal_ret = self.resolve_type_vars_with_obligations(formal_ret); + let formal_ret = self.resolve_vars_with_obligations(formal_ret); let ret_ty = match expected_ret.only_has_type(self) { Some(ret) => ret, None => return Vec::new() @@ -4517,7 +4517,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.span_suggestion( span, "try adding a return type", - format!("-> {} ", self.resolve_type_vars_with_obligations(found)), + format!("-> {} ", self.resolve_vars_with_obligations(found)), Applicability::MachineApplicable); true } @@ -4993,7 +4993,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // If no resolution is possible, then an error is reported. // Numeric inference variables may be left unresolved. pub fn structurally_resolved_type(&self, sp: Span, ty: Ty<'tcx>) -> Ty<'tcx> { - let ty = self.resolve_type_vars_with_obligations(ty); + let ty = self.resolve_vars_with_obligations(ty); if !ty.is_ty_var() { ty } else { diff --git a/src/librustc_typeck/check/op.rs b/src/librustc_typeck/check/op.rs index f9df2d1d848ff..819c347d3ae95 100644 --- a/src/librustc_typeck/check/op.rs +++ b/src/librustc_typeck/check/op.rs @@ -179,7 +179,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.check_expr_with_needs(lhs_expr, Needs::MutPlace) } }; - let lhs_ty = self.resolve_type_vars_with_obligations(lhs_ty); + let lhs_ty = self.resolve_vars_with_obligations(lhs_ty); // N.B., as we have not yet type-checked the RHS, we don't have the // type at hand. Make a variable to represent it. 
The whole reason @@ -196,7 +196,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // see `NB` above let rhs_ty = self.check_expr_coercable_to_type(rhs_expr, rhs_ty_var); - let rhs_ty = self.resolve_type_vars_with_obligations(rhs_ty); + let rhs_ty = self.resolve_vars_with_obligations(rhs_ty); let return_ty = match result { Ok(method) => { diff --git a/src/librustc_typeck/check/pat.rs b/src/librustc_typeck/check/pat.rs index 53ee0777c7c1d..97c30f208f5ec 100644 --- a/src/librustc_typeck/check/pat.rs +++ b/src/librustc_typeck/check/pat.rs @@ -251,7 +251,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expected: Ty<'tcx>, mut def_bm: BindingMode, ) -> (Ty<'tcx>, BindingMode) { - let mut expected = self.resolve_type_vars_with_obligations(&expected); + let mut expected = self.resolve_vars_with_obligations(&expected); // Peel off as many `&` or `&mut` from the scrutinee type as possible. For example, // for `match &&&mut Some(5)` the loop runs three times, aborting when it reaches diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index a68b7fdf931a4..3fa13f08d3ab6 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -67,7 +67,7 @@ pub struct Globals { impl Globals { fn new(edition: Edition) -> Globals { Globals { - // We have no idea how many attributes their will be, so just + // We have no idea how many attributes there will be, so just // initiate the vectors with 0 bits. We'll grow them as necessary. used_attrs: Lock::new(GrowableBitSet::new_empty()), known_attrs: Lock::new(GrowableBitSet::new_empty()), diff --git a/src/test/ui/const-generics/const-argument-cross-crate-mismatch.stderr b/src/test/ui/const-generics/const-argument-cross-crate-mismatch.stderr index b7fd29ce7067a..7090cb880fd49 100644 --- a/src/test/ui/const-generics/const-argument-cross-crate-mismatch.stderr +++ b/src/test/ui/const-generics/const-argument-cross-crate-mismatch.stderr @@ -1,20 +1,20 @@ error[E0308]: mismatched types - --> $DIR/const-argument-cross-crate-mismatch.rs:6:41 + --> $DIR/const-argument-cross-crate-mismatch.rs:6:67 | LL | let _ = const_generic_lib::function(const_generic_lib::Struct([0u8, 1u8])); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `3usize`, found `2usize` + | ^^^^^^^^^^ expected an array with a fixed size of 3 elements, found one with 2 elements | - = note: expected type `const_generic_lib::Struct<3usize>` - found type `const_generic_lib::Struct<_: usize>` + = note: expected type `[u8; 3]` + found type `[u8; 2]` error[E0308]: mismatched types - --> $DIR/const-argument-cross-crate-mismatch.rs:8:39 + --> $DIR/const-argument-cross-crate-mismatch.rs:8:65 | LL | let _: const_generic_lib::Alias = const_generic_lib::Struct([0u8, 1u8, 2u8]); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `2usize`, found `3usize` + | ^^^^^^^^^^^^^^^ expected an array with a fixed size of 2 elements, found one with 3 elements | - = note: expected type `const_generic_lib::Struct<2usize>` - found type `const_generic_lib::Struct<_: usize>` + = note: expected type `[u8; 2]` + found type `[u8; 3]` error: aborting due to 2 previous errors
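Taken together, the new CI scripts follow one shared pattern: source src/ci/shared.sh (which this diff extends with MIRRORS_BASE, the isLinux/isMacOS/isWindows predicates, and the ciCommandAddPath/ciCommandSetEnv wrappers), guard the body with a platform check, fetch what is needed from the mirror, and hand results to later pipeline steps through the wrappers. The sketch below is a hypothetical script in that style, not one added by this diff; `example-tool` and its archive names are made up, and on the pipeline side it would be wired into run.yml like the other steps, with `AGENT_OS: $(Agent.OS)` passed via `env:` so the platform predicates can see it.

    #!/bin/bash
    # Hypothetical src/ci/scripts/install-example-tool.sh following the
    # conventions introduced in this diff. "example-tool" is a placeholder;
    # the mirror object names below do not exist.

    set -euo pipefail
    IFS=$'\n\t'

    source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"

    if isWindows; then
        mkdir -p example-tool
        curl -fo example-tool/example-tool.exe "${MIRRORS_BASE}/example-tool-x86_64-pc-windows-msvc.exe"
        ciCommandAddPath "$(pwd)/example-tool"
    elif isMacOS; then
        curl -fo /usr/local/bin/example-tool "${MIRRORS_BASE}/example-tool-x86_64-apple-darwin"
        chmod +x /usr/local/bin/example-tool
        ciCommandSetEnv EXAMPLE_TOOL /usr/local/bin/example-tool
    fi
    # Linux is skipped on purpose: the Docker images ship their own toolchains.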