From 9d6b7c7cd14e44962ed4bc883e8fe4caf3571868 Mon Sep 17 00:00:00 2001 From: bjorn3 Date: Tue, 17 Jan 2017 19:06:00 +0100 Subject: [PATCH 1/4] Better tokentree pretty printer --- src/libsyntax/print/pprust.rs | 173 +++++++++++++----- src/test/run-fail/while-panic.rs | 3 + .../run-make/trace-macros-flag/hello.trace | 2 +- .../proc-macro/auxiliary/attr-args.rs | 4 +- .../run-pass/syntax-extension-source-utils.rs | 4 +- 5 files changed, 137 insertions(+), 49 deletions(-) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index f8f1820d0b97e..f098037484cb9 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1450,57 +1450,111 @@ impl<'a> State<'a> { } } - /// This doesn't deserve to be called "pretty" printing, but it should be - /// meaning-preserving. A quick hack that might help would be to look at the - /// spans embedded in the TTs to decide where to put spaces and newlines. - /// But it'd be better to parse these according to the grammar of the - /// appropriate macro, transcribe back into the grammar we just parsed from, - /// and then pretty-print the resulting AST nodes (so, e.g., we print - /// expression arguments as expressions). It can be done! I think. + /// Forwards to print_tts pub fn print_tt(&mut self, tt: &tokenstream::TokenTree) -> io::Result<()> { - match *tt { - TokenTree::Token(_, ref tk) => { - word(&mut self.s, &token_to_string(tk))?; - match *tk { - parse::token::DocComment(..) => { - hardbreak(&mut self.s) - } - _ => Ok(()) - } + self.print_tts(&[tt.clone()]) + } + + /// This uses heuristics to be meaning-preserving while making + /// it look nicer than just printing the seperate tokens + pub fn print_tts(&mut self, tts: &[tokenstream::TokenTree]) -> io::Result<()> { + let mut tts_iter = tts.into_iter().peekable(); + while let Some(tt) = tts_iter.next() { + fn is_dot(token: &Token) -> bool { + *token == Token::Dot || *token == Token::DotDot || *token == Token::DotDotDot } - TokenTree::Delimited(_, ref delimed) => { - word(&mut self.s, &token_to_string(&delimed.open_token()))?; - space(&mut self.s)?; - self.print_tts(&delimed.tts)?; - space(&mut self.s)?; - word(&mut self.s, &token_to_string(&delimed.close_token())) - }, - TokenTree::Sequence(_, ref seq) => { - word(&mut self.s, "$(")?; - for tt_elt in &seq.tts { - self.print_tt(tt_elt)?; + + match *tt { + TokenTree::Token(_, ref token) => { + if *token == Token::Semi { + word(&mut self.s, ";")?; + hardbreak(&mut self.s)?; + } else { + word(&mut self.s, &token_to_string(token))?; + match (token, tts_iter.peek()) { + (_, None) => {} // {abc} + // ^^^ + (&parse::token::DocComment(..), _) => hardbreak(&mut self.s)?, // ///abc + // ^^^--- + (_, Some(&&TokenTree::Token(_, Token::Semi))) => {} // let a = 0; + // ^- + (ref a, Some(&&TokenTree::Token(_, ref b))) if is_dot(a) && + !is_dot(b) => {} // ..a + // ^^- + (ref a, Some(&&TokenTree::Token(_, ref b))) if is_dot(a) && + is_dot(b) => { // ... .. + self.nbsp()? // ^^^ -- + } + (&Token::Ident(_), Some(&&TokenTree::Token(_, Token::Not))) => {} // abc! + // ^^^- + (&Token::Literal(_, _), Some(&&TokenTree::Token(_, ref a))) // self.0 .0 + if is_dot(a) => { // ^ - + self.nbsp()? + } + (_, Some(&&TokenTree::Delimited(_, ref delimed))) + if delimed.delim == + parse::token::DelimToken::Paren => {} // abc() + // ^^^-- + (_, Some(&&TokenTree::Token(_, Token::Dot))) => {} // a. 
+ // ^- + _ => self.nbsp()?, + } + } } - word(&mut self.s, ")")?; - if let Some(ref tk) = seq.separator { - word(&mut self.s, &token_to_string(tk))?; + TokenTree::Delimited(_, ref delimed) => { + if delimed.delim == parse::token::DelimToken::Brace { + if delimed.tts.is_empty() { // {} + // ++ + word(&mut self.s, "{}")?; + space(&mut self.s)?; + } else { + hardbreak(&mut self.s)?; + self.head("")?; + self.bopen()?; + space(&mut self.s)?; + + self.print_tts(&delimed.tts)?; + + self.bclose(::ext::quote::rt::DUMMY_SP)?; + if let Some(&&TokenTree::Token(_, Token::Semi)) = tts_iter.peek() { + // {abc}; + // ^^^^^- + } else { + space(&mut self.s)?; + hardbreak(&mut self.s)?; + } + } + } else { + self.ibox(0)?; + + word(&mut self.s, &token_to_string(&delimed.open_token()))?; + self.print_tts(&delimed.tts)?; + word(&mut self.s, &token_to_string(&delimed.close_token()))?; + if let Some(&&TokenTree::Token(_, Token::Semi)) = tts_iter.peek() { + } else { + space(&mut self.s)?; + } + + self.end()?; + } } - match seq.op { - tokenstream::KleeneOp::ZeroOrMore => word(&mut self.s, "*"), - tokenstream::KleeneOp::OneOrMore => word(&mut self.s, "+"), + TokenTree::Sequence(_, ref seq) => { + word(&mut self.s, "$(")?; + space(&mut self.s)?; + self.print_tts(&seq.tts)?; + space(&mut self.s)?; + word(&mut self.s, ")")?; + if let Some(ref tk) = seq.separator { + word(&mut self.s, &token_to_string(tk))?; + } + match seq.op { + tokenstream::KleeneOp::ZeroOrMore => self.word_nbsp("*")?, + tokenstream::KleeneOp::OneOrMore => self.word_nbsp("+")?, + } } } } - } - - pub fn print_tts(&mut self, tts: &[tokenstream::TokenTree]) -> io::Result<()> { - self.ibox(0)?; - for (i, tt) in tts.iter().enumerate() { - if i != 0 { - space(&mut self.s)?; - } - self.print_tt(tt)?; - } - self.end() + Ok(()) } pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> { @@ -3116,4 +3170,35 @@ mod tests { let varstr = variant_to_string(&var); assert_eq!(varstr, "principal_skinner"); } + + #[test] + fn test_pretty_print_tokentrees() { + use parse::parse_tts_from_source_str as parse_tts; + // ignore-tidy-linelength + let original = r#"fn main() +{ + let program = "+ + * - /"; + let mut accumulator = 0; + . .. ...; + a.b; + .. .a; + a. ..; + for token in program.chars() + { + match token + { + \'+\' => accumulator += 1 , \'-\' => accumulator -= 1 , \'*\' => accumulator *= 2 , \'/\' => accumulator /= 2 , _ => {} + + } + + } + + println!("The program \"{}\" calculates the value {}" , program , accumulator); +} +"#; + let sess = ParseSess::new(); + let tts = parse_tts("".to_string(), original.to_string(), &sess).unwrap(); + let pretty = tts_to_string(&*tts); + assert_eq!(original, &*pretty); + } } diff --git a/src/test/run-fail/while-panic.rs b/src/test/run-fail/while-panic.rs index e410684cd349a..70f129d5e1afe 100644 --- a/src/test/run-fail/while-panic.rs +++ b/src/test/run-fail/while-panic.rs @@ -8,6 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +// ignore-pretty +// pretty printer wants to add a newline after line 21 which tidy dislikes + #![allow(while_true)] // error-pattern:giraffe diff --git a/src/test/run-make/trace-macros-flag/hello.trace b/src/test/run-make/trace-macros-flag/hello.trace index cf733339eadf6..553186998a3ff 100644 --- a/src/test/run-make/trace-macros-flag/hello.trace +++ b/src/test/run-make/trace-macros-flag/hello.trace @@ -1,2 +1,2 @@ println! { "Hello, World!" } -print! { concat ! ( "Hello, World!" , "\n" ) } +print! { concat!("Hello, World!" 
, "\n") } diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs index 6e1eb395a0a19..8440a4e672f77 100644 --- a/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs @@ -20,11 +20,11 @@ use proc_macro::TokenStream; pub fn attr_with_args(args: TokenStream, input: TokenStream) -> TokenStream { let args = args.to_string(); - assert_eq!(args, r#"( text = "Hello, world!" )"#); + assert_eq!(args, r#"(text = "Hello, world!")"#); let input = input.to_string(); - assert_eq!(input, "fn foo ( ) { }"); + assert_eq!(input, "fn foo() {}"); r#" fn foo() -> &'static str { "Hello, world!" } diff --git a/src/test/run-pass/syntax-extension-source-utils.rs b/src/test/run-pass/syntax-extension-source-utils.rs index 3b5f033d07b7d..a113e5c1fabec 100644 --- a/src/test/run-pass/syntax-extension-source-utils.rs +++ b/src/test/run-pass/syntax-extension-source-utils.rs @@ -25,7 +25,7 @@ pub fn main() { assert_eq!(column!(), 4); assert_eq!(indirect_line!(), 26); assert!((file!().ends_with("syntax-extension-source-utils.rs"))); - assert_eq!(stringify!((2*3) + 5).to_string(), "( 2 * 3 ) + 5".to_string()); + assert_eq!(stringify!((2*3) + 5).to_string(), "(2 * 3) + 5".to_string()); assert!(include!("syntax-extension-source-utils-files/includeme.\ fragment").to_string() == "victory robot 6".to_string()); @@ -40,5 +40,5 @@ pub fn main() { // The Windows tests are wrapped in an extra module for some reason assert!((m1::m2::where_am_i().ends_with("m1::m2"))); - assert_eq!((43, "( 2 * 3 ) + 5"), (line!(), stringify!((2*3) + 5))); + assert_eq!((43, "(2 * 3) + 5"), (line!(), stringify!((2*3) + 5))); } From 74c639b111d268c9078cae4474ce505d63d4f4eb Mon Sep 17 00:00:00 2001 From: bjorn3 Date: Sun, 5 Feb 2017 15:58:00 +0100 Subject: [PATCH 2/4] Even better tokentree pretty printer --- src/libsyntax/print/pprust.rs | 68 +++++++++++++++++++++-------------- 1 file changed, 42 insertions(+), 26 deletions(-) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index f098037484cb9..bfc0ca4d8e145 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1476,6 +1476,9 @@ impl<'a> State<'a> { // ^^^ (&parse::token::DocComment(..), _) => hardbreak(&mut self.s)?, // ///abc // ^^^--- + (_, Some(&&TokenTree::Token(_, Token::Comma))) => { // abc(a, b); + zerobreak(&mut self.s)? // ^- + }, (_, Some(&&TokenTree::Token(_, Token::Semi))) => {} // let a = 0; // ^- (ref a, Some(&&TokenTree::Token(_, ref b))) if is_dot(a) && @@ -1485,10 +1488,14 @@ impl<'a> State<'a> { is_dot(b) => { // ... .. self.nbsp()? // ^^^ -- } - (&Token::Ident(_), Some(&&TokenTree::Token(_, Token::Not))) => {} // abc! - // ^^^- - (&Token::Literal(_, _), Some(&&TokenTree::Token(_, ref a))) // self.0 .0 - if is_dot(a) => { // ^ - + (&Token::Ident(_), Some(&&TokenTree::Token(_, Token::Not))) => { + // abc! + // ^^^- + } + (&Token::Literal(_, _), Some(&&TokenTree::Token(_, ref a))) + // self.0 .0 + // ^ - + if is_dot(a) => { self.nbsp()? 
} (_, Some(&&TokenTree::Delimited(_, ref delimed))) @@ -1506,22 +1513,28 @@ impl<'a> State<'a> { if delimed.tts.is_empty() { // {} // ++ word(&mut self.s, "{}")?; - space(&mut self.s)?; + zerobreak(&mut self.s)?; } else { + word(&mut self.s, "{")?; hardbreak(&mut self.s)?; - self.head("")?; - self.bopen()?; + self.cbox(4)?; space(&mut self.s)?; self.print_tts(&delimed.tts)?; - self.bclose(::ext::quote::rt::DUMMY_SP)?; - if let Some(&&TokenTree::Token(_, Token::Semi)) = tts_iter.peek() { - // {abc}; - // ^^^^^- - } else { - space(&mut self.s)?; - hardbreak(&mut self.s)?; + self.end()?; + hardbreak(&mut self.s)?; + word(&mut self.s, "}")?; + + match tts_iter.peek(){ + None => {}, + Some(&&TokenTree::Token(_, Token::Semi)) => { // {abc}; + // ^^^^^- + }, + _ => { + space(&mut self.s)?; + hardbreak(&mut self.s)?; + } } } } else { @@ -3174,28 +3187,31 @@ mod tests { #[test] fn test_pretty_print_tokentrees() { use parse::parse_tts_from_source_str as parse_tts; - // ignore-tidy-linelength - let original = r#"fn main() -{ + let original = r#"fn main() { + let program = "+ + * - /"; let mut accumulator = 0; . .. ...; a.b; .. .a; a. ..; - for token in program.chars() - { - match token - { - \'+\' => accumulator += 1 , \'-\' => accumulator -= 1 , \'*\' => accumulator *= 2 , \'/\' => accumulator /= 2 , _ => {} + for token in program.chars() { - } + match token { + + '+' => accumulator += 1 + , '-' => accumulator -= 1 + , '*' => accumulator *= 2 + , '/' => accumulator /= 2 + , _ => {} + } } - println!("The program \"{}\" calculates the value {}" , program , accumulator); -} -"#; + println!("The program \"{}\" calculates the value {}", program + , accumulator); + +}"#; let sess = ParseSess::new(); let tts = parse_tts("".to_string(), original.to_string(), &sess).unwrap(); let pretty = tts_to_string(&*tts); From 0380fc3463f5d1fed873e3dda6ccae0b41fb2423 Mon Sep 17 00:00:00 2001 From: bjorn3 Date: Mon, 6 Feb 2017 19:23:53 +0100 Subject: [PATCH 3/4] Prettier --- src/libsyntax/print/pprust.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index bfc0ca4d8e145..c75d5238fbcb4 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1476,8 +1476,10 @@ impl<'a> State<'a> { // ^^^ (&parse::token::DocComment(..), _) => hardbreak(&mut self.s)?, // ///abc // ^^^--- + (&Token::Comma, _) => {} // abc(a, b); + // ^- (_, Some(&&TokenTree::Token(_, Token::Comma))) => { // abc(a, b); - zerobreak(&mut self.s)? // ^- + zerobreak(&mut self.s)? // ^ - }, (_, Some(&&TokenTree::Token(_, Token::Semi))) => {} // let a = 0; // ^- From c010b1e5702eb81586232486a5dd73df889e2ef3 Mon Sep 17 00:00:00 2001 From: bjorn3 Date: Fri, 10 Feb 2017 20:36:10 +0100 Subject: [PATCH 4/4] Space after comma --- src/libsyntax/print/pprust.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index c75d5238fbcb4..77128489dbfc8 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1479,7 +1479,7 @@ impl<'a> State<'a> { (&Token::Comma, _) => {} // abc(a, b); // ^- (_, Some(&&TokenTree::Token(_, Token::Comma))) => { // abc(a, b); - zerobreak(&mut self.s)? // ^ - + space(&mut self.s, 1)? // ^ - }, (_, Some(&&TokenTree::Token(_, Token::Semi))) => {} // let a = 0; // ^-
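
Note on exercising the series: patch 2 adds a round-trip test (test_pretty_print_tokentrees)
inside pprust.rs itself. Below is a minimal external sketch of the same round trip. It reuses
only the calls that test already makes (ParseSess::new, parse_tts_from_source_str,
tts_to_string); the crate attributes and `use` paths are assumed from the libsyntax layout of
this era and need the unstable rustc_private feature, so treat it as illustrative rather than
a drop-in test.

    #![feature(rustc_private)]
    extern crate syntax;

    use syntax::parse::{self, ParseSess};
    use syntax::print::pprust::tts_to_string;

    fn main() {
        let sess = ParseSess::new();
        let src = "fn foo() {}".to_string();
        // Parse the source into token trees, then pretty-print them back.
        let tts = parse::parse_tts_from_source_str("".to_string(), src.clone(), &sess).unwrap();
        // With this series applied the printer keeps "fn foo() {}" intact instead of
        // emitting "fn foo ( ) { }" (see the updated proc-macro attr-args test in patch 1).
        assert_eq!(tts_to_string(&*tts), src);
    }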