@@ -19,6 +19,7 @@ pub(crate) use item::FnParseMode;
 pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 pub use path::PathStyle;
 
+use core::fmt;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
@@ -60,7 +61,7 @@ mod mut_visit {
 }
 
 bitflags::bitflags! {
-    #[derive(Clone, Copy)]
+    #[derive(Clone, Copy, Debug)]
     struct Restrictions: u8 {
         const STMT_EXPR = 1 << 0;
         const NO_STRUCT_LITERAL = 1 << 1;
@@ -86,7 +87,7 @@ enum BlockMode {
 
 /// Whether or not we should force collection of tokens for an AST node,
 /// regardless of whether or not it has attributes
-#[derive(Clone, Copy, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
 pub enum ForceCollect {
     Yes,
     No,
@@ -134,7 +135,7 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath {
     };
 }
 
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, Debug)]
 pub enum Recovery {
     Allowed,
     Forbidden,
@@ -184,7 +185,7 @@ pub struct Parser<'a> {
     capture_state: CaptureState,
     /// This allows us to recover when the user forget to add braces around
     /// multiple statements in the closure body.
-    pub current_closure: Option<ClosureSpans>,
+    current_closure: Option<ClosureSpans>,
     /// Whether the parser is allowed to do recovery.
     /// This is disabled when parsing macro arguments, see #103534
     pub recovery: Recovery,
@@ -196,7 +197,7 @@ pub struct Parser<'a> {
 rustc_data_structures::static_assert_size!(Parser<'_>, 264);
 
 /// Stores span information about a closure.
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 pub struct ClosureSpans {
     pub whole_closure: Span,
     pub closing_pipe: Span,
@@ -225,15 +226,15 @@ pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
 /// we will never need an `AttrTokenStream`.
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, Debug)]
 pub enum Capturing {
     /// We aren't performing any capturing - this is the default mode.
     No,
     /// We are capturing tokens
     Yes,
 }
 
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 struct CaptureState {
     capturing: Capturing,
     replace_ranges: Vec<ReplaceRange>,
@@ -244,7 +245,7 @@ struct CaptureState {
 /// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
 /// use this type to emit them as a linear sequence. But a linear sequence is
 /// what the parser expects, for the most part.
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 struct TokenCursor {
     // Cursor for the current (innermost) token stream. The delimiters for this
     // token stream are found in `self.stack.last()`; when that is `None` then
@@ -349,6 +350,7 @@ enum TokenExpectType {
 }
 
 /// A sequence separator.
+#[derive(Debug)]
 struct SeqSep {
     /// The separator token.
     sep: Option<TokenKind>,
@@ -366,6 +368,7 @@ impl SeqSep {
     }
 }
 
+#[derive(Debug)]
 pub enum FollowedByType {
     Yes,
     No,
@@ -390,7 +393,7 @@ pub enum Trailing {
     Yes,
 }
 
-#[derive(Clone, Copy, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum TokenDescription {
     ReservedIdentifier,
     Keyword,
@@ -1548,6 +1551,47 @@ impl<'a> Parser<'a> {
         })
     }
 
+    // debug view of the parser's token stream, up to `{lookahead}` tokens
+    pub fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug + '_ {
+        struct DebugParser<'dbg> {
+            parser: &'dbg Parser<'dbg>,
+            lookahead: usize,
+        }
+
+        impl fmt::Debug for DebugParser<'_> {
+            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+                let Self { parser, lookahead } = self;
+                let mut dbg_fmt = f.debug_struct("Parser"); // or at least, one view of
+
+                // we don't need N spans, but we want at least one, so print all of prev_token
+                dbg_fmt.field("prev_token", &parser.prev_token);
+                // make it easier to peek farther ahead by taking TokenKinds only until EOF
+                let tokens = (0..*lookahead)
+                    .map(|i| parser.look_ahead(i, |tok| tok.kind.clone()))
+                    .scan(parser.prev_token == TokenKind::Eof, |eof, tok| {
+                        let current = eof.then_some(tok.clone()); // include a trailing EOF token
+                        *eof |= &tok == &TokenKind::Eof;
+                        current
+                    });
+                dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish());
+                dbg_fmt.field("approx_token_stream_pos", &parser.num_bump_calls);
+
+                // some fields are interesting for certain values, as they relate to macro parsing
+                if let Some(subparser) = parser.subparser_name {
+                    dbg_fmt.field("subparser_name", &subparser);
+                }
+                if let Recovery::Forbidden = parser.recovery {
+                    dbg_fmt.field("recovery", &parser.recovery);
+                }
+
+                // imply there's "more to know" than this view
+                dbg_fmt.finish_non_exhaustive()
+            }
+        }
+
+        DebugParser { parser: self, lookahead }
+    }
+
     pub fn clear_expected_tokens(&mut self) {
         self.expected_tokens.clear();
     }
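
A minimal usage sketch (not part of this commit): `debug_lookahead` returns an `impl fmt::Debug`, so during parser debugging it can be printed with the standard formatting machinery. The helper name `inspect_parser` below is hypothetical, purely for illustration.

// Hypothetical debugging helper inside rustc_parse (illustrative only).
fn inspect_parser(p: &Parser<'_>) {
    // Show `prev_token`, up to the next 3 `TokenKind`s (stopping after EOF),
    // the approximate token-stream position, and, when relevant,
    // `subparser_name` and a forbidden `recovery` mode.
    eprintln!("{:#?}", p.debug_lookahead(3));
    // The same value also works with `dbg!`:
    // dbg!(p.debug_lookahead(3));
}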