From 4449e9bed548dc3aa4985ab93a8837bd07063633 Mon Sep 17 00:00:00 2001
From: Nika Layzell
Date: Sun, 18 Jul 2021 15:53:06 -0400
Subject: [PATCH 1/6] proc_macro: Add an expand_expr method to TokenStream

This feature is aimed at giving proc macros access to powers similar to
those used by builtin macros such as `format_args!` or `concat!`. These
macros are able to accept macros in place of string literal parameters,
such as the format string, as they perform recursive macro expansion
while being expanded.

This can be especially useful in many cases thanks to helper macros
like `concat!`, `stringify!` and `include_str!` which are often used to
construct string literals at compile-time in user code.

For now, this method only allows expanding macros which produce
literals, although more expressions will be supported before the method
is stabilized.
---
 compiler/rustc_expand/src/proc_macro.rs        |  28 +--
 .../rustc_expand/src/proc_macro_server.rs      | 162 ++++++++++++------
 library/proc_macro/src/bridge/mod.rs           |   1 +
 library/proc_macro/src/lib.rs                  |  46 +++++
 .../ui/proc-macro/auxiliary/expand-expr.rs     |  52 ++++++
 .../ui/proc-macro/auxiliary/included-file.txt  |   1 +
 src/test/ui/proc-macro/expand-expr.rs          | 122 +++++++++++++
 src/test/ui/proc-macro/expand-expr.stderr      | 129 ++++++++++++++
 8 files changed, 473 insertions(+), 68 deletions(-)
 create mode 100644 src/test/ui/proc-macro/auxiliary/expand-expr.rs
 create mode 100644 src/test/ui/proc-macro/auxiliary/included-file.txt
 create mode 100644 src/test/ui/proc-macro/expand-expr.rs
 create mode 100644 src/test/ui/proc-macro/expand-expr.stderr

diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs
index 3f84979ac05e7..42c17a60a5d4e 100644
--- a/compiler/rustc_expand/src/proc_macro.rs
+++ b/compiler/rustc_expand/src/proc_macro.rs
@@ -24,8 +24,9 @@ impl base::ProcMacro for BangProcMacro {
         span: Span,
         input: TokenStream,
     ) -> Result {
+        let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace;
         let server = proc_macro_server::Rustc::new(ecx);
-        self.client.run(&EXEC_STRATEGY, server, input, ecx.ecfg.proc_macro_backtrace).map_err(|e| {
+        self.client.run(&EXEC_STRATEGY, server, input, proc_macro_backtrace).map_err(|e| {
             let mut err = ecx.struct_span_err(span, "proc macro panicked");
             if let Some(s) = e.as_str() {
                 err.help(&format!("message: {}", s));
@@ -48,9 +49,10 @@ impl base::AttrProcMacro for AttrProcMacro {
         annotation: TokenStream,
         annotated: TokenStream,
     ) -> Result {
+        let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace;
         let server = proc_macro_server::Rustc::new(ecx);
         self.client
-            .run(&EXEC_STRATEGY, server, annotation, annotated, ecx.ecfg.proc_macro_backtrace)
+            .run(&EXEC_STRATEGY, server, annotation, annotated, proc_macro_backtrace)
             .map_err(|e| {
                 let mut err = ecx.struct_span_err(span, "custom attribute panicked");
                 if let Some(s) = e.as_str() {
@@ -97,19 +99,19 @@ impl MultiItemModifier for ProcMacroDerive {
             nt_to_tokenstream(&item, &ecx.sess.parse_sess, CanSynthesizeMissingTokens::No)
         };
 
+        let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace;
         let server = proc_macro_server::Rustc::new(ecx);
-        let stream =
-            match self.client.run(&EXEC_STRATEGY, server, input, ecx.ecfg.proc_macro_backtrace) {
-                Ok(stream) => stream,
-                Err(e) => {
-                    let mut err = ecx.struct_span_err(span, "proc-macro derive panicked");
-                    if let Some(s) = e.as_str() {
-                        err.help(&format!("message: {}", s));
-                    }
-                    err.emit();
-                    return ExpandResult::Ready(vec![]);
+        let stream = match self.client.run(&EXEC_STRATEGY, server, input,
proc_macro_backtrace) { + Ok(stream) => stream, + Err(e) => { + let mut err = ecx.struct_span_err(span, "proc-macro derive panicked"); + if let Some(s) = e.as_str() { + err.help(&format!("message: {}", s)); } - }; + err.emit(); + return ExpandResult::Ready(vec![]); + } + }; let error_count_before = ecx.sess.parse_sess.span_diagnostic.err_count(); let mut parser = diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 55700f7b0d495..54fa73567b19f 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -1,4 +1,4 @@ -use crate::base::{ExtCtxt, ResolverExpand}; +use crate::base::ExtCtxt; use rustc_ast as ast; use rustc_ast::token::{self, Nonterminal, NtIdent}; @@ -7,7 +7,7 @@ use rustc_ast::tokenstream::{DelimSpan, Spacing::*, TokenStream, TreeAndSpacing} use rustc_ast_pretty::pprust; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lrc; -use rustc_errors::Diagnostic; +use rustc_errors::{Diagnostic, PResult}; use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT; use rustc_lint_defs::BuiltinLintDiagnostics; use rustc_parse::lexer::nfc_normalize; @@ -53,11 +53,11 @@ impl ToInternal for Delimiter { } } -impl FromInternal<(TreeAndSpacing, &'_ mut Vec, &mut Rustc<'_>)> +impl FromInternal<(TreeAndSpacing, &'_ mut Vec, &mut Rustc<'_, '_>)> for TokenTree { fn from_internal( - ((tree, spacing), stack, rustc): (TreeAndSpacing, &mut Vec, &mut Rustc<'_>), + ((tree, spacing), stack, rustc): (TreeAndSpacing, &mut Vec, &mut Rustc<'_, '_>), ) -> Self { use rustc_ast::token::*; @@ -146,10 +146,10 @@ impl FromInternal<(TreeAndSpacing, &'_ mut Vec, &mut Rustc<'_>)> SingleQuote => op!('\''), Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()), - Ident(name, is_raw) => tt!(Ident::new(rustc.sess, name, is_raw)), + Ident(name, is_raw) => tt!(Ident::new(rustc.sess(), name, is_raw)), Lifetime(name) => { let ident = symbol::Ident::new(name, span).without_first_quote(); - stack.push(tt!(Ident::new(rustc.sess, ident.name, false))); + stack.push(tt!(Ident::new(rustc.sess(), ident.name, false))); tt!(Punct::new('\'', true)) } Literal(lit) => tt!(Literal { lit }), @@ -181,15 +181,15 @@ impl FromInternal<(TreeAndSpacing, &'_ mut Vec, &mut Rustc<'_>)> Interpolated(nt) if let Some((name, is_raw)) = ident_name_compatibility_hack(&nt, span, rustc) => { - TokenTree::Ident(Ident::new(rustc.sess, name.name, is_raw, name.span)) + TokenTree::Ident(Ident::new(rustc.sess(), name.name, is_raw, name.span)) } Interpolated(nt) => { - let stream = nt_to_tokenstream(&nt, rustc.sess, CanSynthesizeMissingTokens::No); + let stream = nt_to_tokenstream(&nt, rustc.sess(), CanSynthesizeMissingTokens::No); TokenTree::Group(Group { delimiter: Delimiter::None, stream, span: DelimSpan::from_single(span), - flatten: crate::base::pretty_printing_compatibility_hack(&nt, rustc.sess), + flatten: crate::base::pretty_printing_compatibility_hack(&nt, rustc.sess()), }) } @@ -355,38 +355,38 @@ pub struct Literal { span: Span, } -pub(crate) struct Rustc<'a> { - resolver: &'a dyn ResolverExpand, - sess: &'a ParseSess, +pub(crate) struct Rustc<'a, 'b> { + ecx: &'a mut ExtCtxt<'b>, def_site: Span, call_site: Span, mixed_site: Span, - span_debug: bool, krate: CrateNum, rebased_spans: FxHashMap, } -impl<'a> Rustc<'a> { - pub fn new(cx: &'a ExtCtxt<'_>) -> Self { - let expn_data = cx.current_expansion.id.expn_data(); +impl<'a, 'b> Rustc<'a, 'b> { + pub fn new(ecx: &'a mut ExtCtxt<'b>) -> Self { + let 
expn_data = ecx.current_expansion.id.expn_data(); Rustc { - resolver: cx.resolver, - sess: cx.parse_sess(), - def_site: cx.with_def_site_ctxt(expn_data.def_site), - call_site: cx.with_call_site_ctxt(expn_data.call_site), - mixed_site: cx.with_mixed_site_ctxt(expn_data.call_site), - span_debug: cx.ecfg.span_debug, + def_site: ecx.with_def_site_ctxt(expn_data.def_site), + call_site: ecx.with_call_site_ctxt(expn_data.call_site), + mixed_site: ecx.with_mixed_site_ctxt(expn_data.call_site), krate: expn_data.macro_def_id.unwrap().krate, rebased_spans: FxHashMap::default(), + ecx, } } + fn sess(&self) -> &ParseSess { + self.ecx.parse_sess() + } + fn lit(&mut self, kind: token::LitKind, symbol: Symbol, suffix: Option) -> Literal { Literal { lit: token::Lit::new(kind, symbol, suffix), span: server::Span::call_site(self) } } } -impl server::Types for Rustc<'_> { +impl server::Types for Rustc<'_, '_> { type FreeFunctions = FreeFunctions; type TokenStream = TokenStream; type TokenStreamBuilder = tokenstream::TokenStreamBuilder; @@ -401,17 +401,20 @@ impl server::Types for Rustc<'_> { type Span = Span; } -impl server::FreeFunctions for Rustc<'_> { +impl server::FreeFunctions for Rustc<'_, '_> { fn track_env_var(&mut self, var: &str, value: Option<&str>) { - self.sess.env_depinfo.borrow_mut().insert((Symbol::intern(var), value.map(Symbol::intern))); + self.sess() + .env_depinfo + .borrow_mut() + .insert((Symbol::intern(var), value.map(Symbol::intern))); } fn track_path(&mut self, path: &str) { - self.sess.file_depinfo.borrow_mut().insert(Symbol::intern(path)); + self.sess().file_depinfo.borrow_mut().insert(Symbol::intern(path)); } } -impl server::TokenStream for Rustc<'_> { +impl server::TokenStream for Rustc<'_, '_> { fn new(&mut self) -> Self::TokenStream { TokenStream::default() } @@ -422,13 +425,62 @@ impl server::TokenStream for Rustc<'_> { parse_stream_from_source_str( FileName::proc_macro_source_code(src), src.to_string(), - self.sess, + self.sess(), Some(self.call_site), ) } fn to_string(&mut self, stream: &Self::TokenStream) -> String { pprust::tts_to_string(stream) } + fn expand_expr(&mut self, stream: &Self::TokenStream) -> Result { + // Parse the expression from our tokenstream. + let expr: PResult<'_, _> = try { + let mut p = rustc_parse::stream_to_parser( + self.sess(), + stream.clone(), + Some("proc_macro expand expr"), + ); + let expr = p.parse_expr()?; + if p.token != token::Eof { + p.unexpected()?; + } + expr + }; + let expr = expr.map_err(|mut err| err.emit())?; + + // Perform eager expansion on the expression. + let expr = self + .ecx + .expander() + .fully_expand_fragment(crate::expand::AstFragment::Expr(expr)) + .make_expr(); + + // NOTE: For now, limit `expand_expr` to exclusively expand to literals. + // This may be relaxed in the future. + // We don't use `nt_to_tokenstream` as the tokenstream currently cannot + // be recovered in the general case. + match &expr.kind { + ast::ExprKind::Lit(l) => { + Ok(tokenstream::TokenTree::token(token::Literal(l.token), l.span).into()) + } + ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind { + ast::ExprKind::Lit(l) => match l.token { + token::Lit { kind: token::Integer | token::Float, .. } => { + Ok(std::array::IntoIter::new([ + // FIXME: The span of the `-` token is lost when + // parsing, so we cannot faithfully recover it here. 
+ tokenstream::TokenTree::token(token::BinOp(token::Minus), e.span), + tokenstream::TokenTree::token(token::Literal(l.token), l.span), + ]) + .collect()) + } + _ => Err(()), + }, + _ => Err(()), + }, + _ => Err(()), + } + } fn from_token_tree( &mut self, tree: TokenTree, @@ -440,7 +492,7 @@ impl server::TokenStream for Rustc<'_> { } } -impl server::TokenStreamBuilder for Rustc<'_> { +impl server::TokenStreamBuilder for Rustc<'_, '_> { fn new(&mut self) -> Self::TokenStreamBuilder { tokenstream::TokenStreamBuilder::new() } @@ -452,7 +504,7 @@ impl server::TokenStreamBuilder for Rustc<'_> { } } -impl server::TokenStreamIter for Rustc<'_> { +impl server::TokenStreamIter for Rustc<'_, '_> { fn next( &mut self, iter: &mut Self::TokenStreamIter, @@ -477,7 +529,7 @@ impl server::TokenStreamIter for Rustc<'_> { } } -impl server::Group for Rustc<'_> { +impl server::Group for Rustc<'_, '_> { fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group { Group { delimiter, @@ -506,7 +558,7 @@ impl server::Group for Rustc<'_> { } } -impl server::Punct for Rustc<'_> { +impl server::Punct for Rustc<'_, '_> { fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct { Punct::new(ch, spacing == Spacing::Joint, server::Span::call_site(self)) } @@ -524,9 +576,9 @@ impl server::Punct for Rustc<'_> { } } -impl server::Ident for Rustc<'_> { +impl server::Ident for Rustc<'_, '_> { fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident { - Ident::new(self.sess, Symbol::intern(string), is_raw, span) + Ident::new(self.sess(), Symbol::intern(string), is_raw, span) } fn span(&mut self, ident: Self::Ident) -> Self::Span { ident.span @@ -536,10 +588,10 @@ impl server::Ident for Rustc<'_> { } } -impl server::Literal for Rustc<'_> { +impl server::Literal for Rustc<'_, '_> { fn from_str(&mut self, s: &str) -> Result { let name = FileName::proc_macro_source_code(s); - let mut parser = rustc_parse::new_parser_from_source_str(self.sess, name, s.to_owned()); + let mut parser = rustc_parse::new_parser_from_source_str(self.sess(), name, s.to_owned()); let first_span = parser.token.span.data(); let minus_present = parser.eat(&token::BinOp(token::Minus)); @@ -675,7 +727,7 @@ impl server::Literal for Rustc<'_> { } } -impl server::SourceFile for Rustc<'_> { +impl server::SourceFile for Rustc<'_, '_> { fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool { Lrc::ptr_eq(file1, file2) } @@ -695,7 +747,7 @@ impl server::SourceFile for Rustc<'_> { } } -impl server::MultiSpan for Rustc<'_> { +impl server::MultiSpan for Rustc<'_, '_> { fn new(&mut self) -> Self::MultiSpan { vec![] } @@ -704,7 +756,7 @@ impl server::MultiSpan for Rustc<'_> { } } -impl server::Diagnostic for Rustc<'_> { +impl server::Diagnostic for Rustc<'_, '_> { fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic { let mut diag = Diagnostic::new(level.to_internal(), msg); diag.set_span(MultiSpan::from_spans(spans)); @@ -720,13 +772,13 @@ impl server::Diagnostic for Rustc<'_> { diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None); } fn emit(&mut self, diag: Self::Diagnostic) { - self.sess.span_diagnostic.emit_diagnostic(&diag); + self.sess().span_diagnostic.emit_diagnostic(&diag); } } -impl server::Span for Rustc<'_> { +impl server::Span for Rustc<'_, '_> { fn debug(&mut self, span: Self::Span) -> String { - if self.span_debug { + if self.ecx.ecfg.span_debug { format!("{:?}", span) } else { format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, 
span.hi().0) @@ -742,7 +794,7 @@ impl server::Span for Rustc<'_> { self.mixed_site } fn source_file(&mut self, span: Self::Span) -> Self::SourceFile { - self.sess.source_map().lookup_char_pos(span.lo()).file + self.sess().source_map().lookup_char_pos(span.lo()).file } fn parent(&mut self, span: Self::Span) -> Option { span.parent() @@ -751,16 +803,16 @@ impl server::Span for Rustc<'_> { span.source_callsite() } fn start(&mut self, span: Self::Span) -> LineColumn { - let loc = self.sess.source_map().lookup_char_pos(span.lo()); + let loc = self.sess().source_map().lookup_char_pos(span.lo()); LineColumn { line: loc.line, column: loc.col.to_usize() } } fn end(&mut self, span: Self::Span) -> LineColumn { - let loc = self.sess.source_map().lookup_char_pos(span.hi()); + let loc = self.sess().source_map().lookup_char_pos(span.hi()); LineColumn { line: loc.line, column: loc.col.to_usize() } } fn join(&mut self, first: Self::Span, second: Self::Span) -> Option { - let self_loc = self.sess.source_map().lookup_char_pos(first.lo()); - let other_loc = self.sess.source_map().lookup_char_pos(second.lo()); + let self_loc = self.sess().source_map().lookup_char_pos(first.lo()); + let other_loc = self.sess().source_map().lookup_char_pos(second.lo()); if self_loc.file.name != other_loc.file.name { return None; @@ -772,7 +824,7 @@ impl server::Span for Rustc<'_> { span.with_ctxt(at.ctxt()) } fn source_text(&mut self, span: Self::Span) -> Option { - self.sess.source_map().span_to_snippet(span).ok() + self.sess().source_map().span_to_snippet(span).ok() } /// Saves the provided span into the metadata of /// *the crate we are currently compiling*, which must @@ -799,10 +851,10 @@ impl server::Span for Rustc<'_> { /// since we've loaded `my_proc_macro` from disk in order to execute it). /// In this way, we have obtained a span pointing into `my_proc_macro` fn save_span(&mut self, span: Self::Span) -> usize { - self.sess.save_proc_macro_span(span) + self.sess().save_proc_macro_span(span) } fn recover_proc_macro_span(&mut self, id: usize) -> Self::Span { - let (resolver, krate, def_site) = (self.resolver, self.krate, self.def_site); + let (resolver, krate, def_site) = (&*self.ecx.resolver, self.krate, self.def_site); *self.rebased_spans.entry(id).or_insert_with(|| { // FIXME: `SyntaxContext` for spans from proc macro crates is lost during encoding, // replace it with a def-site context until we are encoding it properly. 
@@ -815,11 +867,11 @@ impl server::Span for Rustc<'_> { fn ident_name_compatibility_hack( nt: &Nonterminal, orig_span: Span, - rustc: &mut Rustc<'_>, + rustc: &mut Rustc<'_, '_>, ) -> Option<(rustc_span::symbol::Ident, bool)> { if let NtIdent(ident, is_raw) = nt { if let ExpnKind::Macro(_, macro_name) = orig_span.ctxt().outer_expn_data().kind { - let source_map = rustc.sess.source_map(); + let source_map = rustc.sess().source_map(); let filename = source_map.span_to_filename(orig_span); if let FileName::Real(RealFileName::LocalPath(path)) = filename { let matches_prefix = |prefix, filename| { @@ -840,7 +892,7 @@ fn ident_name_compatibility_hack( let snippet = source_map.span_to_snippet(orig_span); if snippet.as_deref() == Ok("$name") { if time_macros_impl { - rustc.sess.buffer_lint_with_diagnostic( + rustc.sess().buffer_lint_with_diagnostic( &PROC_MACRO_BACK_COMPAT, orig_span, ast::CRATE_NODE_ID, @@ -865,7 +917,7 @@ fn ident_name_compatibility_hack( .and_then(|c| c.parse::().ok()) .map_or(false, |v| v < 40) { - rustc.sess.buffer_lint_with_diagnostic( + rustc.sess().buffer_lint_with_diagnostic( &PROC_MACRO_BACK_COMPAT, orig_span, ast::CRATE_NODE_ID, @@ -888,7 +940,7 @@ fn ident_name_compatibility_hack( source_map.span_to_filename(rustc.def_site) { if macro_path.to_string_lossy().contains("pin-project-internal-0.") { - rustc.sess.buffer_lint_with_diagnostic( + rustc.sess().buffer_lint_with_diagnostic( &PROC_MACRO_BACK_COMPAT, orig_span, ast::CRATE_NODE_ID, diff --git a/library/proc_macro/src/bridge/mod.rs b/library/proc_macro/src/bridge/mod.rs index 7001e827ad845..4de2b2c8bcf60 100644 --- a/library/proc_macro/src/bridge/mod.rs +++ b/library/proc_macro/src/bridge/mod.rs @@ -62,6 +62,7 @@ macro_rules! with_api { fn clone($self: &$S::TokenStream) -> $S::TokenStream; fn new() -> $S::TokenStream; fn is_empty($self: &$S::TokenStream) -> bool; + fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>; fn from_str(src: &str) -> $S::TokenStream; fn to_string($self: &$S::TokenStream) -> String; fn from_token_tree( diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs index 243922b18b565..a41b98a474d64 100644 --- a/library/proc_macro/src/lib.rs +++ b/library/proc_macro/src/lib.rs @@ -109,6 +109,34 @@ impl !Send for LexError {} #[stable(feature = "proc_macro_lib", since = "1.15.0")] impl !Sync for LexError {} +/// Error returned from `TokenStream::expand_expr`. +#[unstable(feature = "proc_macro_expand", issue = "none")] +#[non_exhaustive] +#[derive(Debug)] +pub struct ExpandError; + +impl ExpandError { + fn new() -> Self { + ExpandError + } +} + +#[unstable(feature = "proc_macro_expand", issue = "none")] +impl fmt::Display for ExpandError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("macro expansion failed") + } +} + +#[unstable(feature = "proc_macro_expand", issue = "none")] +impl error::Error for ExpandError {} + +#[unstable(feature = "proc_macro_expand", issue = "none")] +impl !Send for ExpandError {} + +#[unstable(feature = "proc_macro_expand", issue = "none")] +impl !Sync for ExpandError {} + impl TokenStream { /// Returns an empty `TokenStream` containing no token trees. #[stable(feature = "proc_macro_lib2", since = "1.29.0")] @@ -121,6 +149,24 @@ impl TokenStream { pub fn is_empty(&self) -> bool { self.0.is_empty() } + + /// Parses this `TokenStream` as an expression and attempts to expand any + /// macros within it. Returns the expanded `TokenStream`. 
+ /// + /// Currently only expressions expanding to literals will succeed, although + /// this may be relaxed in the future. + /// + /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded, + /// report an error, failing compilation, and/or return an `Err(..)`. The + /// specific behavior for any error condition, and what conditions are + /// considered errors, is unspecified and may change in the future. + #[unstable(feature = "proc_macro_expand", issue = "none")] + pub fn expand_expr(&self) -> Result { + match bridge::client::TokenStream::expand_expr(&self.0) { + Ok(stream) => Ok(TokenStream(stream)), + Err(_) => Err(ExpandError::new()), + } + } } /// Attempts to break the string into tokens and parse those tokens into a token stream. diff --git a/src/test/ui/proc-macro/auxiliary/expand-expr.rs b/src/test/ui/proc-macro/auxiliary/expand-expr.rs new file mode 100644 index 0000000000000..3de703e8c263b --- /dev/null +++ b/src/test/ui/proc-macro/auxiliary/expand-expr.rs @@ -0,0 +1,52 @@ +// force-host +// no-prefer-dynamic + +#![crate_type = "proc-macro"] +#![deny(warnings)] +#![feature(proc_macro_expand, proc_macro_span)] + +extern crate proc_macro; + +use proc_macro::*; +use std::str::FromStr; + +#[proc_macro] +pub fn expand_expr_is(input: TokenStream) -> TokenStream { + let mut iter = input.into_iter(); + let mut expected_tts = Vec::new(); + loop { + match iter.next() { + Some(TokenTree::Punct(ref p)) if p.as_char() == ',' => break, + Some(tt) => expected_tts.push(tt), + None => panic!("expected comma"), + } + } + + let expected = expected_tts.into_iter().collect::(); + let expanded = iter.collect::().expand_expr().expect("expand_expr failed"); + assert!( + expected.to_string() == expanded.to_string(), + "assert failed\nexpected: `{:?}`\nexpanded: `{:?}`", + expected.to_string(), + expanded.to_string() + ); + + TokenStream::new() +} + +#[proc_macro] +pub fn recursive_expand(_: TokenStream) -> TokenStream { + // Recursively call until we hit the recursion limit and get an error. + // + // NOTE: This doesn't panic if expansion fails because that'll cause a very + // large number of errors to fill the output. + TokenStream::from_str("recursive_expand!{}") + .unwrap() + .expand_expr() + .unwrap_or(std::iter::once(TokenTree::Literal(Literal::u32_suffixed(0))).collect()) +} + +#[proc_macro] +pub fn echo_pm(input: TokenStream) -> TokenStream { + input +} diff --git a/src/test/ui/proc-macro/auxiliary/included-file.txt b/src/test/ui/proc-macro/auxiliary/included-file.txt new file mode 100644 index 0000000000000..b4720047d6cff --- /dev/null +++ b/src/test/ui/proc-macro/auxiliary/included-file.txt @@ -0,0 +1 @@ +Included file contents diff --git a/src/test/ui/proc-macro/expand-expr.rs b/src/test/ui/proc-macro/expand-expr.rs new file mode 100644 index 0000000000000..a9958290a99a5 --- /dev/null +++ b/src/test/ui/proc-macro/expand-expr.rs @@ -0,0 +1,122 @@ +// aux-build:expand-expr.rs + +extern crate expand_expr; + +use expand_expr::{echo_pm, expand_expr_is, recursive_expand}; + +// Check builtin macros can be expanded. 
+ +expand_expr_is!(9u32, line!()); +expand_expr_is!(24u32, column!()); + +expand_expr_is!("Hello, World!", concat!("Hello, ", "World", "!")); +expand_expr_is!("int10floats5.3booltrue", concat!("int", 10, "floats", 5.3, "bool", true)); +expand_expr_is!("Hello", concat!(r##"Hello"##)); + +expand_expr_is!("Included file contents\n", include_str!("auxiliary/included-file.txt")); +expand_expr_is!(b"Included file contents\n", include_bytes!("auxiliary/included-file.txt")); + +expand_expr_is!( + "contents: Included file contents\n", + concat!("contents: ", include_str!("auxiliary/included-file.txt")) +); + +// Correct value is checked using stderr checking to handle paths +expand_expr_is!("", file!()); //~ ERROR: proc macro panicked + +expand_expr_is!("hello", stringify!(hello)); +expand_expr_is!("10 + 20", stringify!(10 + 20)); + +macro_rules! echo_tts { + ($($t:tt)*) => { $($t)* }; //~ ERROR: expected expression, found `$` +} + +macro_rules! echo_lit { + ($l:literal) => { + $l + }; +} + +macro_rules! echo_expr { + ($e:expr) => { + $e + }; +} + +macro_rules! simple_lit { + ($l:literal) => { + expand_expr_is!($l, $l); + expand_expr_is!($l, echo_lit!($l)); + expand_expr_is!($l, echo_expr!($l)); + expand_expr_is!($l, echo_tts!($l)); + expand_expr_is!($l, echo_pm!($l)); + const _: () = { + macro_rules! mac { + () => { + $l + }; + } + expand_expr_is!($l, mac!()); + expand_expr_is!($l, echo_expr!(mac!())); + expand_expr_is!($l, echo_tts!(mac!())); + expand_expr_is!($l, echo_pm!(mac!())); + }; + }; +} + +simple_lit!("Hello, World"); +simple_lit!('c'); +simple_lit!(b'c'); +simple_lit!(10); +simple_lit!(10.0); +simple_lit!(10.0f64); +simple_lit!(-3.14159); +simple_lit!(-3.5e10); +simple_lit!(0xFEED); +simple_lit!(-0xFEED); +simple_lit!(0b0100); +simple_lit!(-0b0100); +simple_lit!("string"); +simple_lit!(r##"raw string"##); +simple_lit!(b"byte string"); +simple_lit!(br##"raw byte string"##); +simple_lit!(true); +simple_lit!(false); + +// Ensure char escapes aren't normalized by expansion +simple_lit!("\u{0}"); +simple_lit!("\0"); +simple_lit!("\x00"); +simple_lit!('\u{0}'); +simple_lit!('\0'); +simple_lit!('\x00'); +simple_lit!(b"\x00"); +simple_lit!(b"\0"); +simple_lit!(b'\x00'); +simple_lit!(b'\0'); + +// Extra tokens after the string literal aren't ignored +expand_expr_is!("string", "string"; hello); //~ ERROR: expected one of `.`, `?`, or an operator, found `;` + //~| ERROR: proc macro panicked + +// Invalid expressions produce errors in addition to returning `Err(())`. +expand_expr_is!("fail", $); //~ ERROR: expected expression, found `$` + //~| ERROR: proc macro panicked +expand_expr_is!("fail", echo_tts!($)); //~ ERROR: proc macro panicked +expand_expr_is!("fail", echo_pm!($)); //~ ERROR: expected expression, found `$` + //~| ERROR: proc macro panicked + +// We get errors reported and recover during macro expansion if the macro +// doesn't produce a valid expression. +expand_expr_is!("string", echo_tts!("string"; hello)); //~ ERROR: macro expansion ignores token `hello` and any following +expand_expr_is!("string", echo_pm!("string"; hello)); //~ ERROR: macro expansion ignores token `;` and any following + +// For now, fail if a non-literal expression is expanded. 
+expand_expr_is!("fail", arbitrary_expression() + "etc"); //~ ERROR: proc macro panicked +expand_expr_is!("fail", echo_tts!(arbitrary_expression() + "etc")); //~ ERROR: proc macro panicked +expand_expr_is!("fail", echo_expr!(arbitrary_expression() + "etc")); //~ ERROR: proc macro panicked +expand_expr_is!("fail", echo_pm!(arbitrary_expression() + "etc")); //~ ERROR: proc macro panicked + +const _: u32 = recursive_expand!(); //~ ERROR: recursion limit reached while expanding `recursive_expand!` + +fn main() {} diff --git a/src/test/ui/proc-macro/expand-expr.stderr b/src/test/ui/proc-macro/expand-expr.stderr new file mode 100644 index 0000000000000..fa8b27b87c6d1 --- /dev/null +++ b/src/test/ui/proc-macro/expand-expr.stderr @@ -0,0 +1,129 @@ +error: proc macro panicked + --> $DIR/expand-expr.rs:25:1 + | +LL | expand_expr_is!("", file!()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: message: assert failed + expected: `"\"\""` + expanded: `"\"$DIR/expand-expr.rs\""` + +error: expected one of `.`, `?`, or an operator, found `;` + --> $DIR/expand-expr.rs:99:35 + | +LL | expand_expr_is!("string", "string"; hello); + | ^ expected one of `.`, `?`, or an operator + +error: proc macro panicked + --> $DIR/expand-expr.rs:99:1 + | +LL | expand_expr_is!("string", "string"; hello); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: message: expand_expr failed: ExpandError + +error: expected expression, found `$` + --> $DIR/expand-expr.rs:103:25 + | +LL | expand_expr_is!("fail", $); + | ^ expected expression + +error: proc macro panicked + --> $DIR/expand-expr.rs:103:1 + | +LL | expand_expr_is!("fail", $); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: message: expand_expr failed: ExpandError + +error: expected expression, found `$` + --> $DIR/expand-expr.rs:31:23 + | +LL | ($($t:tt)*) => { $($t)* }; + | ^^^^ expected expression + +error: proc macro panicked + --> $DIR/expand-expr.rs:105:1 + | +LL | expand_expr_is!("fail", echo_tts!($)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: message: expand_expr failed: ExpandError + +error: expected expression, found `$` + --> $DIR/expand-expr.rs:106:34 + | +LL | expand_expr_is!("fail", echo_pm!($)); + | ^ expected expression + +error: proc macro panicked + --> $DIR/expand-expr.rs:106:1 + | +LL | expand_expr_is!("fail", echo_pm!($)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: message: expand_expr failed: ExpandError + +error: macro expansion ignores token `hello` and any following + --> $DIR/expand-expr.rs:111:47 + | +LL | expand_expr_is!("string", echo_tts!("string"; hello)); + | --------------------^^^^^-- help: you might be missing a semicolon here: `;` + | | + | caused by the macro expansion here + | + = note: the usage of `echo_tts!` is likely invalid in expression context + +error: macro expansion ignores token `;` and any following + --> $DIR/expand-expr.rs:112:44 + | +LL | expand_expr_is!("string", echo_pm!("string"; hello)); + | -----------------^-------- help: you might be missing a semicolon here: `;` + | | + | caused by the macro expansion here + | + = note: the usage of `echo_pm!` is likely invalid in expression context + +error: proc macro panicked + --> $DIR/expand-expr.rs:115:1 + | +LL | expand_expr_is!("fail", arbitrary_expression() + "etc"); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: message: expand_expr failed: ExpandError + +error: proc macro panicked + --> $DIR/expand-expr.rs:116:1 + | +LL | expand_expr_is!("fail", echo_tts!(arbitrary_expression() + "etc")); + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: message: expand_expr failed: ExpandError + +error: proc macro panicked + --> $DIR/expand-expr.rs:117:1 + | +LL | expand_expr_is!("fail", echo_expr!(arbitrary_expression() + "etc")); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: message: expand_expr failed: ExpandError + +error: proc macro panicked + --> $DIR/expand-expr.rs:118:1 + | +LL | expand_expr_is!("fail", echo_pm!(arbitrary_expression() + "etc")); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: message: expand_expr failed: ExpandError + +error: recursion limit reached while expanding `recursive_expand!` + --> $DIR/expand-expr.rs:120:16 + | +LL | const _: u32 = recursive_expand!(); + | ^^^^^^^^^^^^^^^^^^^ + | + = help: consider adding a `#![recursion_limit="256"]` attribute to your crate (`expand_expr`) + = note: this error originates in the macro `recursive_expand` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: aborting due to 16 previous errors + From 5bf3a5dfc72e68b4a631301b48b30cf913b8cfee Mon Sep 17 00:00:00 2001 From: Joshua Nelson Date: Mon, 8 Nov 2021 04:16:34 +0000 Subject: [PATCH 2/6] Make `compiler-docs` only control the default instead of being a hard off-switch This also fixes `x doc src/tools/clippy` when compiler-docs is disabled. --- src/bootstrap/dist.rs | 6 ++---- src/bootstrap/doc.rs | 15 +++------------ 2 files changed, 5 insertions(+), 16 deletions(-) diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index d4875cfe1b066..a799732adde9d 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -96,7 +96,8 @@ impl Step for RustcDocs { const DEFAULT: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/librustc") + let builder = run.builder; + run.path("rustc-docs").default_condition(builder.config.compiler_docs) } fn make_run(run: RunConfig<'_>) { @@ -106,9 +107,6 @@ impl Step for RustcDocs { /// Builds the `rustc-docs` installer component. fn run(self, builder: &Builder<'_>) -> Option { let host = self.host; - if !builder.config.compiler_docs { - return None; - } builder.default_doc(&[]); let mut tarball = Tarball::new(builder, "rustc-docs", &host.triple); diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs index 2804e7119fbc1..f0f31c447bda4 100644 --- a/src/bootstrap/doc.rs +++ b/src/bootstrap/doc.rs @@ -529,7 +529,7 @@ impl Step for Rustc { fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { let builder = run.builder; - run.krate("rustc-main").path("compiler").default_condition(builder.config.docs) + run.krate("rustc-main").path("compiler").default_condition(builder.config.compiler_docs) } fn make_run(run: RunConfig<'_>) { @@ -560,11 +560,6 @@ impl Step for Rustc { }) .collect::>(); - if !builder.config.compiler_docs && !builder.was_invoked_explicitly::() { - builder.info("\tskipping - compiler/librustdoc docs disabled"); - return; - } - // This is the intended out directory for compiler documentation. let out = builder.compiler_doc_out(target); t!(fs::create_dir_all(&out)); @@ -674,7 +669,8 @@ macro_rules! tool_doc { const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.krate($should_run) + let builder = run.builder; + run.krate($should_run).default_condition(builder.config.compiler_docs) } fn make_run(run: RunConfig<'_>) { @@ -705,11 +701,6 @@ macro_rules! 
tool_doc { let compiler = builder.compiler(stage, builder.config.build); - if !builder.config.compiler_docs && !builder.was_invoked_explicitly::() { - builder.info("\tskipping - compiler/tool docs disabled"); - return; - } - // Build rustc docs so that we generate relative links. builder.ensure(Rustc { stage, target }); From 6192246e398b405c01d9d3ee2ec759946e5677bd Mon Sep 17 00:00:00 2001 From: Joshua Nelson Date: Mon, 8 Nov 2021 04:45:15 +0000 Subject: [PATCH 3/6] x.py: remove fixme by deleting code As far as I can tell, this parameter was never used, so just delete it as unnecessary. --- src/bootstrap/tool.rs | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs index af6f4bb0e5fcb..1317c3f983975 100644 --- a/src/bootstrap/tool.rs +++ b/src/bootstrap/tool.rs @@ -286,7 +286,6 @@ macro_rules! bootstrap_tool { $name:ident, $path:expr, $tool_name:expr $(,is_external_tool = $external:expr)* $(,is_unstable_tool = $unstable:expr)* - $(,features = $features:expr)* ; )+) => { #[derive(Copy, PartialEq, Eq, Clone)] @@ -349,12 +348,7 @@ macro_rules! bootstrap_tool { } else { SourceType::InTree }, - extra_features: { - // FIXME(#60643): avoid this lint by using `_` - let mut _tmp = Vec::new(); - $(_tmp.extend($features);)* - _tmp - }, + extra_features: vec![], }).expect("expected to build -- essential tool") } } From fefe1e9192696c07f1655ed2726c4e114b70b096 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Mon, 8 Nov 2021 16:59:36 +0100 Subject: [PATCH 4/6] Record more artifact sizes during self-profiling. --- compiler/rustc_codegen_llvm/src/back/write.rs | 46 +++++++++++++++++++ compiler/rustc_codegen_ssa/src/back/link.rs | 13 ++++++ compiler/rustc_metadata/src/rmeta/encoder.rs | 3 ++ .../src/partitioning/mod.rs | 11 +++++ 4 files changed, 73 insertions(+) diff --git a/compiler/rustc_codegen_llvm/src/back/write.rs b/compiler/rustc_codegen_llvm/src/back/write.rs index 82c3c2006eb14..4abda33b2aa89 100644 --- a/compiler/rustc_codegen_llvm/src/back/write.rs +++ b/compiler/rustc_codegen_llvm/src/back/write.rs @@ -17,6 +17,7 @@ use rustc_codegen_ssa::back::write::{ }; use rustc_codegen_ssa::traits::*; use rustc_codegen_ssa::{CompiledModule, ModuleCodegen}; +use rustc_data_structures::profiling::SelfProfilerRef; use rustc_data_structures::small_c_str::SmallCStr; use rustc_errors::{FatalError, Handler, Level}; use rustc_fs_util::{link_or_copy, path_to_c_string}; @@ -53,6 +54,7 @@ pub fn write_output_file( output: &Path, dwo_output: Option<&Path>, file_type: llvm::FileType, + self_profiler_ref: &SelfProfilerRef, ) -> Result<(), FatalError> { unsafe { let output_c = path_to_c_string(output); @@ -76,6 +78,19 @@ pub fn write_output_file( file_type, ) }; + + // Record artifact sizes for self-profiling + if result == llvm::LLVMRustResult::Success { + let artifact_kind = match file_type { + llvm::FileType::ObjectFile => "object_file", + llvm::FileType::AssemblyFile => "assembly_file", + }; + record_artifact_size(self_profiler_ref, artifact_kind, output); + if let Some(dwo_file) = dwo_output { + record_artifact_size(self_profiler_ref, "dwo_file", dwo_file); + } + } + result.into_result().map_err(|()| { let msg = format!("could not write output to {}", output.display()); llvm_err(handler, &msg) @@ -752,6 +767,14 @@ pub(crate) unsafe fn codegen( let thin = ThinBuffer::new(llmod); let data = thin.data(); + if let Some(bitcode_filename) = bc_out.file_name() { + cgcx.prof.artifact_size( + "llvm_bitcode", + bitcode_filename.to_string_lossy(), + 
data.len() as u64, + ); + } + if config.emit_bc || config.emit_obj == EmitObj::Bitcode { let _timer = cgcx.prof.generic_activity_with_arg( "LLVM_module_codegen_emit_bitcode", @@ -812,6 +835,11 @@ pub(crate) unsafe fn codegen( } let result = llvm::LLVMRustPrintModule(llmod, out_c.as_ptr(), demangle_callback); + + if result == llvm::LLVMRustResult::Success { + record_artifact_size(&cgcx.prof, "llvm_ir", &out); + } + result.into_result().map_err(|()| { let msg = format!("failed to write LLVM IR to {}", out.display()); llvm_err(diag_handler, &msg) @@ -842,6 +870,7 @@ pub(crate) unsafe fn codegen( &path, None, llvm::FileType::AssemblyFile, + &cgcx.prof, ) })?; } @@ -875,6 +904,7 @@ pub(crate) unsafe fn codegen( &obj_out, dwo_out, llvm::FileType::ObjectFile, + &cgcx.prof, ) })?; } @@ -1131,3 +1161,19 @@ fn create_msvc_imps( symbol_name.starts_with(b"__llvm_profile_") } } + +fn record_artifact_size( + self_profiler_ref: &SelfProfilerRef, + artifact_kind: &'static str, + path: &Path, +) { + // Don't stat the file if we are not going to record its size. + if !self_profiler_ref.enabled() { + return; + } + + if let Some(artifact_name) = path.file_name() { + let file_size = std::fs::metadata(path).map(|m| m.len()).unwrap_or(0); + self_profiler_ref.artifact_size(artifact_kind, artifact_name.to_string_lossy(), file_size); + } +} diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs index 6c02543bd7cc4..1ba0c4fa05b5b 100644 --- a/compiler/rustc_codegen_ssa/src/back/link.rs +++ b/compiler/rustc_codegen_ssa/src/back/link.rs @@ -121,6 +121,19 @@ pub fn link_binary<'a, B: ArchiveBuilder<'a>>( if sess.opts.json_artifact_notifications { sess.parse_sess.span_diagnostic.emit_artifact_notification(&out_filename, "link"); } + + if sess.prof.enabled() { + if let Some(artifact_name) = out_filename.file_name() { + // Record size for self-profiling + let file_size = std::fs::metadata(&out_filename).map(|m| m.len()).unwrap_or(0); + + sess.prof.artifact_size( + "linked_artifact", + artifact_name.to_string_lossy(), + file_size, + ); + } + } } } diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 0dbef66ac37d7..49c3472a20266 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -2187,5 +2187,8 @@ fn encode_metadata_impl(tcx: TyCtxt<'_>) -> EncodedMetadata { result[header + 2] = (pos >> 8) as u8; result[header + 3] = (pos >> 0) as u8; + // Record metadata size for self-profiling + tcx.prof.artifact_size("crate_metadata", "crate_metadata", result.len() as u64); + EncodedMetadata { raw_data: result } } diff --git a/compiler/rustc_monomorphize/src/partitioning/mod.rs b/compiler/rustc_monomorphize/src/partitioning/mod.rs index 7a7a56a034ed2..658c9028ca1a1 100644 --- a/compiler/rustc_monomorphize/src/partitioning/mod.rs +++ b/compiler/rustc_monomorphize/src/partitioning/mod.rs @@ -361,6 +361,17 @@ fn collect_and_partition_mono_items<'tcx>( ) }); + if tcx.prof.enabled() { + // Record CGU size estimates for self-profiling. 
+ for cgu in codegen_units { + tcx.prof.artifact_size( + "codegen_unit_size_estimate", + &cgu.name().as_str()[..], + cgu.size_estimate() as u64, + ); + } + } + let mono_items: DefIdSet = items .iter() .filter_map(|mono_item| match *mono_item { From db4e60b29f863bbf78351b7f095b221ffe7b93a6 Mon Sep 17 00:00:00 2001 From: asquared31415 <34665709+asquared31415@users.noreply.github.com> Date: Tue, 9 Nov 2021 07:13:53 -0500 Subject: [PATCH 5/6] document Box and box_free connection --- library/alloc/src/boxed.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs index bd4f52560421b..f6332b072cf30 100644 --- a/library/alloc/src/boxed.rs +++ b/library/alloc/src/boxed.rs @@ -169,6 +169,9 @@ use crate::vec::Vec; #[lang = "owned_box"] #[fundamental] #[stable(feature = "rust1", since = "1.0.0")] +// The declaration of the `Box` struct must be kept in sync with the +// `alloc::alloc::box_free` function or ICEs will happen. See the comment +// on `box_free` for more details. pub struct Box< T: ?Sized, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, From 10d65a9636907ff5555307e9a3ebe0123ca2c3e6 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Fri, 5 Nov 2021 15:58:14 +0100 Subject: [PATCH 6/6] Allow to run a specific rustdoc-js* test --- src/bootstrap/test.rs | 53 +++++++++++++--------------------- src/bootstrap/util.rs | 32 ++++++++++++++++++++ src/tools/rustdoc-js/tester.js | 18 ++++++++---- 3 files changed, 64 insertions(+), 39 deletions(-) diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index 8594fa42266b4..22bf6b8a9d4dc 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -763,7 +763,7 @@ impl Step for RustdocJSStd { const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/test/rustdoc-js-std") + run.suite_path("src/test/rustdoc-js-std") } fn make_run(run: RunConfig<'_>) { @@ -783,6 +783,17 @@ impl Step for RustdocJSStd { .arg(builder.doc_out(self.target)) .arg("--test-folder") .arg(builder.src.join("src/test/rustdoc-js-std")); + for path in &builder.paths { + if let Some(p) = + util::is_valid_test_suite_arg(path, "src/test/rustdoc-js-std", builder) + { + if !p.ends_with(".js") { + eprintln!("A non-js file was given: `{}`", path.display()); + panic!("Cannot run rustdoc-js-std tests"); + } + command.arg("--test-file").arg(path); + } + } builder.ensure(crate::doc::Std { target: self.target, stage: builder.top_stage }); builder.run(&mut command); } else { @@ -803,7 +814,7 @@ impl Step for RustdocJSNotStd { const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/test/rustdoc-js") + run.suite_path("src/test/rustdoc-js") } fn make_run(run: RunConfig<'_>) { @@ -938,8 +949,12 @@ impl Step for RustdocGUI { .arg("--tests-folder") .arg(builder.build.src.join("src/test/rustdoc-gui")); for path in &builder.paths { - if let Some(name) = path.file_name().and_then(|f| f.to_str()) { - if name.ends_with(".goml") { + if let Some(p) = util::is_valid_test_suite_arg(path, "src/test/rustdoc-gui", builder) { + if !p.ends_with(".goml") { + eprintln!("A non-goml file was given: `{}`", path.display()); + panic!("Cannot run rustdoc-gui tests"); + } + if let Some(name) = path.file_name().and_then(|f| f.to_str()) { command.arg("--file").arg(name); } } @@ -1416,35 +1431,7 @@ note: if you're sure you want to do this, please open an issue as to why. 
In the // Get test-args by striping suite path let mut test_args: Vec<&str> = paths .iter() - .map(|p| match p.strip_prefix(".") { - Ok(path) => path, - Err(_) => p, - }) - .filter(|p| p.starts_with(suite_path)) - .filter(|p| { - let exists = p.is_dir() || p.is_file(); - if !exists { - if let Some(p) = p.to_str() { - builder.info(&format!( - "Warning: Skipping \"{}\": not a regular file or directory", - p - )); - } - } - exists - }) - .filter_map(|p| { - // Since test suite paths are themselves directories, if we don't - // specify a directory or file, we'll get an empty string here - // (the result of the test suite directory without its suite prefix). - // Therefore, we need to filter these out, as only the first --test-args - // flag is respected, so providing an empty --test-args conflicts with - // any following it. - match p.strip_prefix(suite_path).ok().and_then(|p| p.to_str()) { - Some(s) if !s.is_empty() => Some(s), - _ => None, - } - }) + .filter_map(|p| util::is_valid_test_suite_arg(p, suite_path, builder)) .collect(); test_args.append(&mut builder.config.cmd.test_args()); diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs index 112979b0bebc8..57178aa382ffd 100644 --- a/src/bootstrap/util.rs +++ b/src/bootstrap/util.rs @@ -310,3 +310,35 @@ pub fn use_host_linker(target: TargetSelection) -> bool { || target.contains("fuchsia") || target.contains("bpf")) } + +pub fn is_valid_test_suite_arg<'a, P: AsRef>( + path: &'a Path, + suite_path: P, + builder: &Builder<'_>, +) -> Option<&'a str> { + let suite_path = suite_path.as_ref(); + let path = match path.strip_prefix(".") { + Ok(p) => p, + Err(_) => path, + }; + if !path.starts_with(suite_path) { + return None; + } + let exists = path.is_dir() || path.is_file(); + if !exists { + if let Some(p) = path.to_str() { + builder.info(&format!("Warning: Skipping \"{}\": not a regular file or directory", p)); + } + return None; + } + // Since test suite paths are themselves directories, if we don't + // specify a directory or file, we'll get an empty string here + // (the result of the test suite directory without its suite prefix). + // Therefore, we need to filter these out, as only the first --test-args + // flag is respected, so providing an empty --test-args conflicts with + // any following it. 
+ match path.strip_prefix(suite_path).ok().and_then(|p| p.to_str()) { + Some(s) if !s.is_empty() => Some(s), + _ => None, + } +} diff --git a/src/tools/rustdoc-js/tester.js b/src/tools/rustdoc-js/tester.js index a673e425dfff9..4f73a7f634098 100644 --- a/src/tools/rustdoc-js/tester.js +++ b/src/tools/rustdoc-js/tester.js @@ -401,7 +401,8 @@ function showHelp() { console.log(" --doc-folder [PATH] : location of the generated doc folder"); console.log(" --help : show this message then quit"); console.log(" --crate-name [STRING] : crate name to be used"); - console.log(" --test-file [PATH] : location of the JS test file"); + console.log(" --test-file [PATHs] : location of the JS test files (can be called " + + "multiple times)"); console.log(" --test-folder [PATH] : location of the JS tests folder"); console.log(" --resource-suffix [STRING] : suffix to refer to the correct files"); } @@ -412,7 +413,7 @@ function parseOptions(args) { "resource_suffix": "", "doc_folder": "", "test_folder": "", - "test_file": "", + "test_file": [], }; var correspondences = { "--resource-suffix": "resource_suffix", @@ -429,7 +430,11 @@ function parseOptions(args) { console.log("Missing argument after `" + args[i - 1] + "` option."); return null; } - opts[correspondences[args[i - 1]]] = args[i]; + if (args[i - 1] !== "--test-file") { + opts[correspondences[args[i - 1]]] = args[i]; + } else { + opts[correspondences[args[i - 1]]].push(args[i]); + } } else if (args[i] === "--help") { showHelp(); process.exit(0); @@ -471,9 +476,10 @@ function main(argv) { var errors = 0; if (opts["test_file"].length !== 0) { - errors += checkFile(opts["test_file"], opts, loaded, index); - } - if (opts["test_folder"].length !== 0) { + opts["test_file"].forEach(function(file) { + errors += checkFile(file, opts, loaded, index); + }); + } else if (opts["test_folder"].length !== 0) { fs.readdirSync(opts["test_folder"]).forEach(function(file) { if (!file.endsWith(".js")) { return;