@@ -1450,57 +1450,111 @@ impl<'a> State<'a> {
         }
     }
 
-    /// This doesn't deserve to be called "pretty" printing, but it should be
-    /// meaning-preserving. A quick hack that might help would be to look at the
-    /// spans embedded in the TTs to decide where to put spaces and newlines.
-    /// But it'd be better to parse these according to the grammar of the
-    /// appropriate macro, transcribe back into the grammar we just parsed from,
-    /// and then pretty-print the resulting AST nodes (so, e.g., we print
-    /// expression arguments as expressions). It can be done! I think.
+    /// Forwards to print_tts
     pub fn print_tt(&mut self, tt: &tokenstream::TokenTree) -> io::Result<()> {
-        match *tt {
-            TokenTree::Token(_, ref tk) => {
-                word(&mut self.s, &token_to_string(tk))?;
-                match *tk {
-                    parse::token::DocComment(..) => {
-                        hardbreak(&mut self.s)
-                    }
-                    _ => Ok(())
-                }
+        self.print_tts(&[tt.clone()])
+    }
+
+    /// This uses heuristics to be meaning-preserving while making
+    /// it look nicer than just printing the separate tokens
+    pub fn print_tts(&mut self, tts: &[tokenstream::TokenTree]) -> io::Result<()> {
+        let mut tts_iter = tts.into_iter().peekable();
+        while let Some(tt) = tts_iter.next() {
+            fn is_dot(token: &Token) -> bool {
+                *token == Token::Dot || *token == Token::DotDot || *token == Token::DotDotDot
             }
-            TokenTree::Delimited(_, ref delimed) => {
-                word(&mut self.s, &token_to_string(&delimed.open_token()))?;
-                space(&mut self.s)?;
-                self.print_tts(&delimed.tts)?;
-                space(&mut self.s)?;
-                word(&mut self.s, &token_to_string(&delimed.close_token()))
-            },
-            TokenTree::Sequence(_, ref seq) => {
-                word(&mut self.s, "$(")?;
-                for tt_elt in &seq.tts {
-                    self.print_tt(tt_elt)?;
+
+            match *tt {
+                TokenTree::Token(_, ref token) => {
+                    if *token == Token::Semi {
+                        word(&mut self.s, ";")?;
+                        hardbreak(&mut self.s)?;
+                    } else {
+                        word(&mut self.s, &token_to_string(token))?;
+                        match (token, tts_iter.peek()) {
+                            (_, None) => {} // {abc}
+                                            //  ^^^
+                            (&parse::token::DocComment(..), _) => hardbreak(&mut self.s)?, // ///abc
+                                                                                           // ^^^---
+                            (_, Some(&&TokenTree::Token(_, Token::Semi))) => {} // let a = 0;
+                                                                                //          ^-
+                            (ref a, Some(&&TokenTree::Token(_, ref b))) if is_dot(a) &&
+                                                                           !is_dot(b) => {} // ..a
+                                                                                            // ^^-
+                            (ref a, Some(&&TokenTree::Token(_, ref b))) if is_dot(a) &&
+                                                                           is_dot(b) => { // ... ..
+                                self.nbsp()? //                                              ^^^ --
+                            }
+                            (&Token::Ident(_), Some(&&TokenTree::Token(_, Token::Not))) => {} // abc!
+                                                                                              // ^^^-
+                            (&Token::Literal(_, _), Some(&&TokenTree::Token(_, ref a))) // self.0 .0
+                                if is_dot(a) => { //                                         ^ -
+                                self.nbsp()?
+                            }
+                            (_, Some(&&TokenTree::Delimited(_, ref delimed)))
+                                if delimed.delim ==
+                                   parse::token::DelimToken::Paren => {} // abc()
+                                                                         // ^^^--
+                            (_, Some(&&TokenTree::Token(_, Token::Dot))) => {} // a.
+                                                                               // ^-
+                            _ => self.nbsp()?,
+                        }
+                    }
                 }
-                word(&mut self.s, ")")?;
-                if let Some(ref tk) = seq.separator {
-                    word(&mut self.s, &token_to_string(tk))?;
+                TokenTree::Delimited(_, ref delimed) => {
+                    if delimed.delim == parse::token::DelimToken::Brace {
+                        if delimed.tts.is_empty() { // {}
+                                                    // ++
+                            word(&mut self.s, "{}")?;
+                            space(&mut self.s)?;
+                        } else {
+                            hardbreak(&mut self.s)?;
+                            self.head("")?;
+                            self.bopen()?;
+                            space(&mut self.s)?;
+
+                            self.print_tts(&delimed.tts)?;
+
+                            self.bclose(::ext::quote::rt::DUMMY_SP)?;
+                            if let Some(&&TokenTree::Token(_, Token::Semi)) = tts_iter.peek() {
+                                // {abc};
+                                // ^^^^^-
+                            } else {
+                                space(&mut self.s)?;
+                                hardbreak(&mut self.s)?;
+                            }
+                        }
+                    } else {
+                        self.ibox(0)?;
+
+                        word(&mut self.s, &token_to_string(&delimed.open_token()))?;
+                        self.print_tts(&delimed.tts)?;
+                        word(&mut self.s, &token_to_string(&delimed.close_token()))?;
+                        if let Some(&&TokenTree::Token(_, Token::Semi)) = tts_iter.peek() {
+                        } else {
+                            space(&mut self.s)?;
+                        }
+
+                        self.end()?;
+                    }
                 }
-                match seq.op {
-                    tokenstream::KleeneOp::ZeroOrMore => word(&mut self.s, "*"),
-                    tokenstream::KleeneOp::OneOrMore => word(&mut self.s, "+"),
+                TokenTree::Sequence(_, ref seq) => {
+                    word(&mut self.s, "$(")?;
+                    space(&mut self.s)?;
+                    self.print_tts(&seq.tts)?;
+                    space(&mut self.s)?;
+                    word(&mut self.s, ")")?;
+                    if let Some(ref tk) = seq.separator {
+                        word(&mut self.s, &token_to_string(tk))?;
+                    }
+                    match seq.op {
+                        tokenstream::KleeneOp::ZeroOrMore => self.word_nbsp("*")?,
+                        tokenstream::KleeneOp::OneOrMore => self.word_nbsp("+")?,
+                    }
                 }
             }
         }
-    }
-
-    pub fn print_tts(&mut self, tts: &[tokenstream::TokenTree]) -> io::Result<()> {
-        self.ibox(0)?;
-        for (i, tt) in tts.iter().enumerate() {
-            if i != 0 {
-                space(&mut self.s)?;
-            }
-            self.print_tt(tt)?;
-        }
-        self.end()
+        Ok(())
     }
 
     pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> {
@@ -3116,4 +3170,35 @@ mod tests {
         let varstr = variant_to_string(&var);
         assert_eq!(varstr, "principal_skinner");
     }
+
+    #[test]
+    fn test_pretty_print_tokentrees() {
+        use parse::parse_tts_from_source_str as parse_tts;
+        // ignore-tidy-linelength
+        let original = r#"fn main()
+{
+    let program = "+ + * - /";
+    let mut accumulator = 0;
+    . .. ...;
+    a.b;
+    .. .a;
+    a. ..;
+    for token in program.chars()
+    {
+        match token
+        {
+            '+' => accumulator += 1 , '-' => accumulator -= 1 , '*' => accumulator *= 2 , '/' => accumulator /= 2 , _ => {}
+
+        }
+
+    }
+
+    println!("The program \"{}\" calculates the value {}" , program , accumulator);
+}
+"#;
+        let sess = ParseSess::new();
+        let tts = parse_tts("<test>".to_string(), original.to_string(), &sess).unwrap();
+        let pretty = tts_to_string(&*tts);
+        assert_eq!(original, &*pretty);
+    }
 }
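
For readers skimming the heuristics above, here is a minimal, self-contained sketch of the same peek-ahead idea applied to plain strings. It is not code from this commit: the `Tok` enum, `tok_str`, and `join` are illustrative stand-ins that only mirror the shape of the `match (token, tts_iter.peek())` in the diff, including the hard break after `;` and the suppressed spaces around dots, `!`, and call parentheses.

```rust
// Illustrative sketch only: a stand-in token type, not rustc's Token/TokenTree.
enum Tok {
    Ident(String),
    Dot,
    Semi,
    Not,
    OpenParen,
    CloseParen,
}

fn tok_str(t: &Tok) -> &str {
    match t {
        Tok::Ident(s) => s.as_str(),
        Tok::Dot => ".",
        Tok::Semi => ";",
        Tok::Not => "!",
        Tok::OpenParen => "(",
        Tok::CloseParen => ")",
    }
}

/// Join tokens, peeking one token ahead to decide whether to emit a space,
/// mirroring the `match (token, tts_iter.peek())` used in the diff above.
fn join(tokens: &[Tok]) -> String {
    let mut out = String::new();
    let mut iter = tokens.iter().peekable();
    while let Some(tok) = iter.next() {
        out.push_str(tok_str(tok));
        match (tok, iter.peek()) {
            (_, None) => {}                                       // end of stream
            (_, Some(Tok::Semi)) => {}                            // `a;`, not `a ;`
            (Tok::Dot, _) | (_, Some(Tok::Dot)) => {}             // `a.b`, not `a . b`
            (Tok::Ident(_), Some(Tok::Not)) => {}                 // `foo!`
            (_, Some(Tok::OpenParen)) | (Tok::OpenParen, _) => {} // `foo(` and `(x`
            (_, Some(Tok::CloseParen)) => {}                      // `x)`
            (Tok::Semi, _) => out.push('\n'),                     // hard break after `;`
            _ => out.push(' '),                                   // default: one space
        }
    }
    out
}

fn main() {
    let toks = vec![
        Tok::Ident("a".into()),
        Tok::Dot,
        Tok::Ident("b".into()),
        Tok::Semi,
        Tok::Ident("foo".into()),
        Tok::Not,
        Tok::OpenParen,
        Tok::CloseParen,
        Tok::Semi,
    ];
    assert_eq!(join(&toks), "a.b;\nfoo!();");
}
```

On this toy input the lookahead yields `a.b;` on one line and `foo!();` on the next, which is the kind of spacing the new `print_tts` aims for on real token streams (compare the `a.b`, `abc!`, and `abc()` cases annotated in the diff).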