Auto merge of rust-lang#77247 - jonas-schievink:rollup-r6ehh8h, r=jonas-schievink

Rollup of 10 pull requests

Successful merges:

 - rust-lang#76917 (Add missing code examples on HashMap types)
 - rust-lang#77107 (Enable const propagation into operands at mir_opt_level=2)
 - rust-lang#77129 (Update cargo)
 - rust-lang#77167 (Fix FIXME in core::num test: Check sign of zero in min/max tests.)
 - rust-lang#77184 (Rust vec bench import specific rand::RngCore)
 - rust-lang#77208 (Late link args order)
 - rust-lang#77209 (Fix documentation highlighting in ty::BorrowKind)
 - rust-lang#77231 (Move helper function for `missing_const_for_fn` out of rustc to clippy)
 - rust-lang#77235 (pretty-print-reparse hack: Rename some variables for clarity)
 - rust-lang#77243 (Test more attributes in test issue-75930-derive-cfg.rs)

Failed merges:

r? `@ghost`
bors committed Sep 27, 2020
2 parents 623fb90 + b7c05a3 commit 1ec980d
Showing 16 changed files with 1,852 additions and 157 deletions.
6 changes: 3 additions & 3 deletions compiler/rustc_codegen_ssa/src/back/link.rs
@@ -1333,9 +1333,6 @@ fn add_late_link_args(
crate_type: CrateType,
codegen_results: &CodegenResults,
) {
if let Some(args) = sess.target.target.options.late_link_args.get(&flavor) {
cmd.args(args);
}
let any_dynamic_crate = crate_type == CrateType::Dylib
|| codegen_results.crate_info.dependency_formats.iter().any(|(ty, list)| {
*ty == crate_type && list.iter().any(|&linkage| linkage == Linkage::Dynamic)
@@ -1349,6 +1346,9 @@ fn add_late_link_args(
cmd.args(args);
}
}
if let Some(args) = sess.target.target.options.late_link_args.get(&flavor) {
cmd.args(args);
}
}

/// Add arbitrary "post-link" args defined by the target spec.
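To make the effect of the reordering above concrete, here is a minimal sketch (a simplified helper with hypothetical names, not the actual rustc code) of the argument order `add_late_link_args` now produces: the crate-type-specific late link args come first, and the target's generic `late_link_args` are appended last.

```rust
// Simplified sketch only; the real function reads these lists from the target
// spec and distinguishes dynamically vs. statically linked crates.
fn add_late_link_args_sketch(
    cmd: &mut Vec<String>,
    specific_args: &[String], // late_link_args_dynamic / late_link_args_static
    generic_args: &[String],  // the target's generic late_link_args
) {
    cmd.extend_from_slice(specific_args); // emitted first
    cmd.extend_from_slice(generic_args);  // now emitted last, after the specific ones
}
```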
26 changes: 16 additions & 10 deletions compiler/rustc_middle/src/ty/mod.rs
@@ -682,25 +682,31 @@ pub enum BorrowKind {
/// implicit closure bindings. It is needed when the closure
/// is borrowing or mutating a mutable referent, e.g.:
///
/// let x: &mut isize = ...;
/// let y = || *x += 5;
/// ```
/// let x: &mut isize = ...;
/// let y = || *x += 5;
/// ```
///
/// If we were to try to translate this closure into a more explicit
/// form, we'd encounter an error with the code as written:
///
/// struct Env { x: & &mut isize }
/// let x: &mut isize = ...;
/// let y = (&mut Env { &x }, fn_ptr); // Closure is pair of env and fn
/// fn fn_ptr(env: &mut Env) { **env.x += 5; }
/// ```
/// struct Env { x: & &mut isize }
/// let x: &mut isize = ...;
/// let y = (&mut Env { &x }, fn_ptr); // Closure is pair of env and fn
/// fn fn_ptr(env: &mut Env) { **env.x += 5; }
/// ```
///
/// This is then illegal because you cannot mutate a `&mut` found
/// in an aliasable location. To solve, you'd have to translate with
/// an `&mut` borrow:
///
/// struct Env { x: & &mut isize }
/// let x: &mut isize = ...;
/// let y = (&mut Env { &mut x }, fn_ptr); // changed from &x to &mut x
/// fn fn_ptr(env: &mut Env) { **env.x += 5; }
/// ```
/// struct Env { x: & &mut isize }
/// let x: &mut isize = ...;
/// let y = (&mut Env { &mut x }, fn_ptr); // changed from &x to &mut x
/// fn fn_ptr(env: &mut Env) { **env.x += 5; }
/// ```
///
/// Now the assignment to `**env.x` is legal, but creating a
/// mutable pointer to `x` is not because `x` is not mutable. We
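As a self-contained illustration of the closure pattern those docs describe (not part of the diff; the function name is hypothetical), the following compiles and captures `x` with exactly the unique immutable borrow discussed above:

```rust
fn unique_imm_borrow_demo() {
    let mut n = 0;
    let x: &mut i32 = &mut n;
    // The closure mutates through the captured `&mut`, so it needs a
    // "unique immutable" borrow of `x` rather than an ordinary shared borrow.
    let mut add_five = || *x += 5;
    add_five();
    assert_eq!(n, 5);
}
```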
10 changes: 5 additions & 5 deletions compiler/rustc_mir/src/transform/const_prop.rs
@@ -1046,9 +1046,9 @@ impl<'mir, 'tcx> MutVisitor<'tcx> for ConstPropagator<'mir, 'tcx> {
fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
self.super_operand(operand, location);

// Only const prop copies and moves on `mir_opt_level=3` as doing so
// currently increases compile time.
if self.tcx.sess.opts.debugging_opts.mir_opt_level >= 3 {
// Only const prop copies and moves on `mir_opt_level=2` as doing so
// currently slightly increases compile time in some cases.
if self.tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
self.propagate_operand(operand)
}
}
@@ -1246,8 +1246,8 @@ impl<'mir, 'tcx> MutVisitor<'tcx> for ConstPropagator<'mir, 'tcx> {
| TerminatorKind::InlineAsm { .. } => {}
// Every argument in our function calls has already been propagated in `visit_operand`.
//
// NOTE: because LLVM codegen gives performance regressions with it, so this is gated
// on `mir_opt_level=3`.
// NOTE: this is gated on `mir_opt_level=2` because propagating call arguments gives
// slight performance regressions in LLVM codegen in some cases.
TerminatorKind::Call { .. } => {}
}

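For context on what the relaxed gate enables, here is a hypothetical source-level example (not from the diff; `caller` and `callee` are made-up names): with operand propagation active at `mir_opt_level=2`, a locally known constant used as a call argument can be substituted directly into the MIR operand.

```rust
fn callee(v: u32) -> u32 {
    v + 1
}

fn caller() -> u32 {
    let x = 2;
    // With the pass enabled, the operand `x` in this call can be rewritten in
    // MIR to the constant `2_u32` before codegen ever sees the local.
    callee(x)
}
```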
1 change: 0 additions & 1 deletion compiler/rustc_mir/src/transform/mod.rs
@@ -36,7 +36,6 @@ pub mod match_branches;
pub mod no_landing_pads;
pub mod nrvo;
pub mod promote_consts;
pub mod qualify_min_const_fn;
pub mod remove_noop_landing_pads;
pub mod remove_unneeded_drops;
pub mod required_consts;
42 changes: 22 additions & 20 deletions compiler/rustc_parse/src/lib.rs
@@ -7,7 +7,7 @@
#![feature(or_patterns)]

use rustc_ast as ast;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
use rustc_ast::token::{self, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{self, Spacing, TokenStream, TokenTree};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
@@ -299,7 +299,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
// FIXME(#43081): Avoid this pretty-print + reparse hack
let source = pprust::nonterminal_to_string(nt);
let filename = FileName::macro_expansion_source_code(&source);
let tokens_for_real = parse_stream_from_source_str(filename, source, sess, Some(span));
let reparsed_tokens = parse_stream_from_source_str(filename, source, sess, Some(span));

// During early phases of the compiler the AST could get modified
// directly (e.g., attributes added or removed) and the internal cache
@@ -325,17 +325,17 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
// modifications, including adding/removing typically non-semantic
// tokens such as extra braces and commas, don't happen.
if let Some(tokens) = tokens {
if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_for_real, sess) {
if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess) {
return tokens;
}
info!(
"cached tokens found, but they're not \"probably equal\", \
going with stringified version"
);
info!("cached tokens: {:?}", tokens);
info!("reparsed tokens: {:?}", tokens_for_real);
info!("reparsed tokens: {:?}", reparsed_tokens);
}
tokens_for_real
reparsed_tokens
}

// See comments in `Nonterminal::to_tokenstream` for why we care about
@@ -344,8 +344,8 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
// This is otherwise the same as `eq_unspanned`, only recursing with a
// different method.
pub fn tokenstream_probably_equal_for_proc_macro(
first: &TokenStream,
other: &TokenStream,
tokens: &TokenStream,
reparsed_tokens: &TokenStream,
sess: &ParseSess,
) -> bool {
// When checking for `probably_eq`, we ignore certain tokens that aren't
@@ -359,9 +359,6 @@ pub fn tokenstream_probably_equal_for_proc_macro(
// The pretty printer tends to add trailing commas to
// everything, and in particular, after struct fields.
| token::Comma
// The pretty printer emits `NoDelim` as whitespace.
| token::OpenDelim(DelimToken::NoDelim)
| token::CloseDelim(DelimToken::NoDelim)
// The pretty printer collapses many semicolons into one.
| token::Semi
// We don't preserve leading `|` tokens in patterns, so
@@ -460,10 +457,11 @@ pub fn tokenstream_probably_equal_for_proc_macro(

// Break tokens after we expand any nonterminals, so that we break tokens
// that are produced as a result of nonterminal expansion.
let t1 = first.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
let t2 = other.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
let tokens = tokens.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
let reparsed_tokens =
reparsed_tokens.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);

t1.eq_by(t2, |t1, t2| tokentree_probably_equal_for_proc_macro(&t1, &t2, sess))
tokens.eq_by(reparsed_tokens, |t, rt| tokentree_probably_equal_for_proc_macro(&t, &rt, sess))
}

// See comments in `Nonterminal::to_tokenstream` for why we care about
@@ -472,16 +470,20 @@ pub fn tokenstream_probably_equal_for_proc_macro(
// This is otherwise the same as `eq_unspanned`, only recursing with a
// different method.
pub fn tokentree_probably_equal_for_proc_macro(
first: &TokenTree,
other: &TokenTree,
token: &TokenTree,
reparsed_token: &TokenTree,
sess: &ParseSess,
) -> bool {
match (first, other) {
(TokenTree::Token(token), TokenTree::Token(token2)) => {
token_probably_equal_for_proc_macro(token, token2)
match (token, reparsed_token) {
(TokenTree::Token(token), TokenTree::Token(reparsed_token)) => {
token_probably_equal_for_proc_macro(token, reparsed_token)
}
(TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
delim == delim2 && tokenstream_probably_equal_for_proc_macro(&tts, &tts2, sess)
(
TokenTree::Delimited(_, delim, tokens),
TokenTree::Delimited(_, reparsed_delim, reparsed_tokens),
) => {
delim == reparsed_delim
&& tokenstream_probably_equal_for_proc_macro(tokens, reparsed_tokens, sess)
}
_ => false,
}
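As background for the renamed variables above, an illustrative example (not from the diff; `Cached` and `Reparsed` are made-up names) of the kind of discrepancy the "probably equal" check tolerates: pretty-printing and reparsing a nonterminal can add or drop cosmetic tokens, such as a trailing comma, without changing proc-macro semantics.

```rust
// Illustrative only: these two items differ solely in a trailing comma, one of
// the non-semantic tokens the comparison above ignores when deciding whether
// the cached token stream still matches the reparsed one.
struct Cached {
    x: i32,
    y: i32,
}

struct Reparsed {
    x: i32,
    y: i32
}
```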
7 changes: 1 addition & 6 deletions compiler/rustc_target/src/spec/windows_gnu_base.rs
@@ -23,6 +23,7 @@ pub fn opts() -> TargetOptions {
"-lmsvcrt".to_string(),
"-lmingwex".to_string(),
"-lmingw32".to_string(),
"-lgcc".to_string(), // alas, mingw* libraries above depend on libgcc
// mingw's msvcrt is a weird hybrid import library and static library.
// And it seems that the linker fails to use import symbols from msvcrt
// that are required from functions in msvcrt in certain cases. For example
@@ -41,8 +42,6 @@
// the shared libgcc_s-dw2-1.dll. This is required to support
// unwinding across DLL boundaries.
"-lgcc_s".to_string(),
"-lgcc".to_string(),
"-lkernel32".to_string(),
];
late_link_args_dynamic.insert(LinkerFlavor::Gcc, dynamic_unwind_libs.clone());
late_link_args_dynamic.insert(LinkerFlavor::Lld(LldFlavor::Ld), dynamic_unwind_libs);
@@ -54,10 +53,6 @@
// boundaries when unwinding across FFI boundaries.
"-lgcc_eh".to_string(),
"-l:libpthread.a".to_string(),
"-lgcc".to_string(),
// libpthread depends on libmsvcrt, so we need to link it *again*.
"-lmsvcrt".to_string(),
"-lkernel32".to_string(),
];
late_link_args_static.insert(LinkerFlavor::Gcc, static_unwind_libs.clone());
late_link_args_static.insert(LinkerFlavor::Lld(LldFlavor::Ld), static_unwind_libs);
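A minimal sketch (hypothetical and simplified, showing only the entries visible in the hunks above rather than the actual `TargetOptions` fields) of how the lists relate after this change: `-lgcc` now appears once in the generic late link args, and the unwinder-specific lists no longer repeat it.

```rust
fn windows_gnu_late_link_args_sketch() -> (Vec<&'static str>, Vec<&'static str>, Vec<&'static str>) {
    let generic = vec!["-lmsvcrt", "-lmingwex", "-lmingw32", "-lgcc"]; // -lgcc added here once
    let dynamic_unwind = vec!["-lgcc_s"];                              // duplicate -lgcc removed
    let static_unwind = vec!["-lgcc_eh", "-l:libpthread.a"];           // duplicates removed
    (generic, dynamic_unwind, static_unwind)
}
```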
2 changes: 1 addition & 1 deletion library/alloc/benches/vec.rs
@@ -1,4 +1,4 @@
use rand::prelude::*;
use rand::RngCore;
use std::iter::{repeat, FromIterator};
use test::{black_box, Bencher};

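A brief illustration (assuming the `rand` crate as used by this benchmark; `fill` is just an illustrative helper) of why the narrower import is enough: the benchmark only needs the `RngCore` trait in scope, not the full prelude.

```rust
use rand::RngCore;

// `fill_bytes` is defined on `RngCore`, so importing just that trait is
// sufficient to call it on any random number generator.
fn fill(rng: &mut dyn RngCore, buf: &mut [u8]) {
    rng.fill_bytes(buf);
}
```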
14 changes: 13 additions & 1 deletion library/core/tests/num/mod.rs
@@ -634,14 +634,18 @@ assume_usize_width! {
macro_rules! test_float {
($modname: ident, $fty: ty, $inf: expr, $neginf: expr, $nan: expr) => {
mod $modname {
// FIXME(nagisa): these tests should test for sign of -0.0
#[test]
fn min() {
assert_eq!((0.0 as $fty).min(0.0), 0.0);
assert!((0.0 as $fty).min(0.0).is_sign_positive());
assert_eq!((-0.0 as $fty).min(-0.0), -0.0);
assert!((-0.0 as $fty).min(-0.0).is_sign_negative());
assert_eq!((9.0 as $fty).min(9.0), 9.0);
assert_eq!((-9.0 as $fty).min(0.0), -9.0);
assert_eq!((0.0 as $fty).min(9.0), 0.0);
assert!((0.0 as $fty).min(9.0).is_sign_positive());
assert_eq!((-0.0 as $fty).min(9.0), -0.0);
assert!((-0.0 as $fty).min(9.0).is_sign_negative());
assert_eq!((-0.0 as $fty).min(-9.0), -9.0);
assert_eq!(($inf as $fty).min(9.0), 9.0);
assert_eq!((9.0 as $fty).min($inf), 9.0);
@@ -660,11 +664,19 @@ macro_rules! test_float {
#[test]
fn max() {
assert_eq!((0.0 as $fty).max(0.0), 0.0);
assert!((0.0 as $fty).max(0.0).is_sign_positive());
assert_eq!((-0.0 as $fty).max(-0.0), -0.0);
assert!((-0.0 as $fty).max(-0.0).is_sign_negative());
assert_eq!((9.0 as $fty).max(9.0), 9.0);
assert_eq!((-9.0 as $fty).max(0.0), 0.0);
assert!((-9.0 as $fty).max(0.0).is_sign_positive());
assert_eq!((-9.0 as $fty).max(-0.0), -0.0);
assert!((-9.0 as $fty).max(-0.0).is_sign_negative());
assert_eq!((0.0 as $fty).max(9.0), 9.0);
assert_eq!((0.0 as $fty).max(-9.0), 0.0);
assert!((0.0 as $fty).max(-9.0).is_sign_positive());
assert_eq!((-0.0 as $fty).max(-9.0), -0.0);
assert!((-0.0 as $fty).max(-9.0).is_sign_negative());
assert_eq!(($inf as $fty).max(9.0), $inf);
assert_eq!((9.0 as $fty).max($inf), $inf);
assert_eq!(($inf as $fty).max(-9.0), $inf);
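For context on why the added `is_sign_positive`/`is_sign_negative` assertions are needed (illustrative, not from the diff): IEEE 754 equality treats `0.0` and `-0.0` as equal, so `assert_eq!` alone cannot tell which zero `min`/`max` returned.

```rust
fn main() {
    // +0.0 and -0.0 compare equal, so assert_eq! cannot distinguish them...
    assert_eq!(0.0_f64, -0.0_f64);
    // ...which is why the tests above also inspect the sign bit explicitly.
    assert!((0.0_f64).is_sign_positive());
    assert!((-0.0_f64).is_sign_negative());

    // For example, (-0.0).min(9.0) returns a zero that compares equal to 0.0,
    // but only the sign check reveals that it is the negative zero.
    let z = (-0.0_f64).min(9.0);
    assert_eq!(z, 0.0);
    assert!(z.is_sign_negative());
}
```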
(The remaining changed files are not shown.)
