@@ -19,6 +19,7 @@ pub(crate) use item::FnParseMode;
 pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 pub use path::PathStyle;
 
+use core::fmt;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
@@ -46,7 +47,7 @@ use crate::errors::{
 };
 
 bitflags::bitflags! {
-    #[derive(Clone, Copy)]
+    #[derive(Clone, Copy, Debug)]
     struct Restrictions: u8 {
         const STMT_EXPR = 1 << 0;
         const NO_STRUCT_LITERAL = 1 << 1;
@@ -72,7 +73,7 @@ enum BlockMode {
 
 /// Whether or not we should force collection of tokens for an AST node,
 /// regardless of whether or not it has attributes
-#[derive(Clone, Copy, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
 pub enum ForceCollect {
     Yes,
     No,
@@ -120,7 +121,7 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath {
     };
 }
 
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, Debug)]
 pub enum Recovery {
     Allowed,
     Forbidden,
@@ -182,7 +183,7 @@ pub struct Parser<'a> {
 rustc_data_structures::static_assert_size!(Parser<'_>, 264);
 
 /// Stores span information about a closure.
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 pub struct ClosureSpans {
     pub whole_closure: Span,
     pub closing_pipe: Span,
@@ -211,15 +212,15 @@ pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
 /// we will never need an `AttrTokenStream`.
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, Debug)]
 pub enum Capturing {
     /// We aren't performing any capturing - this is the default mode.
     No,
     /// We are capturing tokens
     Yes,
 }
 
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 struct CaptureState {
     capturing: Capturing,
     replace_ranges: Vec<ReplaceRange>,
@@ -230,7 +231,7 @@ struct CaptureState {
 /// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
 /// use this type to emit them as a linear sequence. But a linear sequence is
 /// what the parser expects, for the most part.
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 struct TokenCursor {
     // Cursor for the current (innermost) token stream. The delimiters for this
     // token stream are found in `self.stack.last()`; when that is `None` then
@@ -335,6 +336,7 @@ enum TokenExpectType {
 }
 
 /// A sequence separator.
+#[derive(Debug)]
 struct SeqSep {
     /// The separator token.
     sep: Option<TokenKind>,
@@ -352,6 +354,7 @@ impl SeqSep {
     }
 }
 
+#[derive(Debug)]
 pub enum FollowedByType {
     Yes,
     No,
@@ -376,7 +379,7 @@ pub enum Trailing {
     Yes,
 }
 
-#[derive(Clone, Copy, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum TokenDescription {
     ReservedIdentifier,
     Keyword,
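
The hunks above add `#[derive(Debug)]` to several parser-internal types, which makes them usable with `{:?}` formatting (for example in `tracing` output or debug assertions). As a minimal standalone sketch of what the derive buys, using a hypothetical enum rather than the rustc types themselves:

```rust
// Hypothetical illustration, not part of this commit: deriving Debug lets a
// value be printed with the `{:?}` formatter.
#[derive(Clone, Copy, Debug)]
enum Capture {
    No,
    Yes,
}

fn main() {
    let mode = Capture::Yes;
    // Prints "Yes"; without the Debug derive this would not compile.
    println!("{:?}", mode);
}
```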