@@ -24,7 +24,7 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
 use rustc_ast::util::case::Case;
 use rustc_ast::AttrId;
 use rustc_ast::DUMMY_NODE_ID;
-use rustc_ast::{self as ast, AnonConst, AttrStyle, Const, DelimArgs, Extern};
+use rustc_ast::{self as ast, AnonConst, Const, DelimArgs, Extern};
 use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, MacDelimiter, Mutability, StrLit};
 use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
 use rustc_ast_pretty::pprust;
@@ -38,7 +38,7 @@ use rustc_session::parse::ParseSess;
 use rustc_span::source_map::{Span, DUMMY_SP};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
 use std::ops::Range;
-use std::{cmp, mem, slice};
+use std::{mem, slice};
 use thin_vec::ThinVec;
 use tracing::debug;
 
@@ -224,11 +224,6 @@ struct TokenCursor {
     // because it's the outermost token stream which never has delimiters.
     stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,
 
-    // We need to desugar doc comments from `/// foo` form into `#[doc =
-    // r"foo"]` form when parsing declarative macro inputs in `parse_tt`,
-    // because some declarative macros look for `doc` attributes.
-    desugar_doc_comments: bool,
-
     // Counts the number of calls to `{,inlined_}next`.
     num_next_calls: usize,
 
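
Editorial note on the removed `desugar_doc_comments` field: as the deleted comment explains, `parse_tt` needs `/// foo` turned into `#[doc = r"foo"]` because some declarative macros match on `doc` attributes. The standalone sketch below is ordinary stable Rust, not rustc internals, and the macro and function names are invented for illustration; it only works because the parser performs that desugaring on macro input.

// A declarative macro that captures and re-emits attributes. Doc comments in
// the invocation reach the `$(#[$meta:meta])*` matcher as `#[doc = r"..."]`
// token trees, thanks to the desugaring discussed in this change.
macro_rules! forward_docs {
    ($(#[$meta:meta])* fn $name:ident();) => {
        $(#[$meta])*
        fn $name() {}
    };
}

forward_docs! {
    /// This doc comment is only matchable because the parser desugars it
    /// into an equivalent `doc` attribute.
    fn documented();
}

fn main() {
    documented();
}
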
@@ -271,23 +266,11 @@ impl TokenCursor {
             if let Some(tree) = self.tree_cursor.next_ref() {
                 match tree {
                     &TokenTree::Token(ref token, spacing) => {
-                        match (self.desugar_doc_comments, token) {
-                            (
-                                true,
-                                &Token { kind: token::DocComment(_, attr_style, data), span },
-                            ) => {
-                                let desugared = self.desugar(attr_style, data, span);
-                                self.tree_cursor.replace_prev_and_rewind(desugared);
-                                // Continue to get the first token of the desugared doc comment.
-                            }
-                            _ => {
-                                debug_assert!(!matches!(
-                                    token.kind,
-                                    token::OpenDelim(_) | token::CloseDelim(_)
-                                ));
-                                return (token.clone(), spacing);
-                            }
-                        }
+                        debug_assert!(!matches!(
+                            token.kind,
+                            token::OpenDelim(_) | token::CloseDelim(_)
+                        ));
+                        return (token.clone(), spacing);
                     }
                     &TokenTree::Delimited(sp, delim, ref tts) => {
                         let trees = tts.clone().into_trees();
@@ -311,52 +294,6 @@ impl TokenCursor {
             }
         }
     }
-
-    // Desugar a doc comment into something like `#[doc = r"foo"]`.
-    fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> Vec<TokenTree> {
-        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
-        // required to wrap the text. E.g.
-        // - `abc d` is wrapped as `r"abc d"` (num_of_hashes = 0)
-        // - `abc "d"` is wrapped as `r#"abc "d""#` (num_of_hashes = 1)
-        // - `abc "##d##"` is wrapped as `r###"abc "##d##""###` (num_of_hashes = 3)
-        let mut num_of_hashes = 0;
-        let mut count = 0;
-        for ch in data.as_str().chars() {
-            count = match ch {
-                '"' => 1,
-                '#' if count > 0 => count + 1,
-                _ => 0,
-            };
-            num_of_hashes = cmp::max(num_of_hashes, count);
-        }
-
-        // `/// foo` becomes `doc = r"foo"`.
-        let delim_span = DelimSpan::from_single(span);
-        let body = TokenTree::Delimited(
-            delim_span,
-            Delimiter::Bracket,
-            [
-                TokenTree::token_alone(token::Ident(sym::doc, false), span),
-                TokenTree::token_alone(token::Eq, span),
-                TokenTree::token_alone(
-                    TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
-                    span,
-                ),
-            ]
-            .into_iter()
-            .collect::<TokenStream>(),
-        );
-
-        if attr_style == AttrStyle::Inner {
-            vec![
-                TokenTree::token_alone(token::Pound, span),
-                TokenTree::token_alone(token::Not, span),
-                body,
-            ]
-        } else {
-            vec![TokenTree::token_alone(token::Pound, span), body]
-        }
-    }
 }
 
 #[derive(Debug, Clone, PartialEq)]
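
For reference, the hash-counting loop deleted above (presumably re-homed alongside `TokenStream::desugar_doc_comments`, which the later hunk calls) can be reproduced as ordinary Rust. The sketch below is not rustc code: `required_hashes` is a hypothetical helper name, and the assertions restate the worked examples from the removed comment.

// Minimum number of `#`s needed to wrap `data` in a raw string literal,
// mirroring the removed `desugar` logic: a run of `#`s only contributes
// when it immediately follows a `"`.
fn required_hashes(data: &str) -> u32 {
    let mut num_of_hashes: u32 = 0;
    let mut count: u32 = 0;
    for ch in data.chars() {
        count = match ch {
            '"' => 1,
            '#' if count > 0 => count + 1,
            _ => 0,
        };
        num_of_hashes = num_of_hashes.max(count);
    }
    num_of_hashes
}

fn main() {
    assert_eq!(required_hashes("abc d"), 0);              // r"abc d"
    assert_eq!(required_hashes(r#"abc "d""#), 1);         // r#"abc "d""#
    assert_eq!(required_hashes(r###"abc "##d##""###), 3); // r###"abc "##d##""###
}
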
@@ -451,10 +388,14 @@ pub(super) fn token_descr(token: &Token) -> String {
 impl<'a> Parser<'a> {
     pub fn new(
         sess: &'a ParseSess,
-        tokens: TokenStream,
+        mut stream: TokenStream,
         desugar_doc_comments: bool,
         subparser_name: Option<&'static str>,
     ) -> Self {
+        if desugar_doc_comments {
+            stream.desugar_doc_comments();
+        }
+
         let mut parser = Parser {
             sess,
             token: Token::dummy(),
@@ -464,10 +405,9 @@ impl<'a> Parser<'a> {
             restrictions: Restrictions::empty(),
             expected_tokens: Vec::new(),
             token_cursor: TokenCursor {
-                tree_cursor: tokens.into_trees(),
+                tree_cursor: stream.into_trees(),
                 stack: Vec::new(),
                 num_next_calls: 0,
-                desugar_doc_comments,
                 break_last_token: false,
             },
             unmatched_angle_bracket_count: 0,
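
With this hunk, `Parser::new` desugars doc comments eagerly over the whole stream (the new `stream.desugar_doc_comments()` call above) rather than lazily inside `TokenCursor`; the tokens the parser sees should be the same. As a reminder of what that desugaring yields, the two items below are equivalent (ordinary Rust, illustrative names only):

// Written form: an outer doc comment on the item.
/// Adds one.
fn succ(x: u32) -> u32 { x + 1 }

// Desugared form that the parser works with internally.
#[doc = r"Adds one."]
fn succ_desugared(x: u32) -> u32 { x + 1 }

fn main() {
    assert_eq!(succ(1), succ_desugared(1));
}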