 #![feature(or_patterns)]
 
 use rustc_ast::ast;
-use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{self, IsJoint, TokenStream, TokenTree};
+use rustc_ast::token::{self, Nonterminal, Token, TokenKind, DelimToken};
+use rustc_ast::tokenstream::{self, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diagnostic, FatalError, Level, PResult};
 use rustc_session::parse::ParseSess;
-use rustc_span::symbol::kw;
 use rustc_span::{FileName, SourceFile, Span};
+use rustc_span::symbol::kw;
 
-use std::mem;
 use std::path::Path;
 use std::str;
+use std::mem;
 
-use log::{debug, info};
+use log::info;
 
 pub const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");
 
@@ -308,7 +308,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream
     // modifications, including adding/removing typically non-semantic
     // tokens such as extra braces and commas, don't happen.
     if let Some(tokens) = tokens {
-        if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_for_real, sess) {
+        if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_for_real) {
            return tokens;
        }
        info!(
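
The guard in the hunk above follows a reuse-if-probably-equal pattern: keep the captured tokens, which carry the real spans, only when they are probably equal to the stream obtained by pretty-printing and reparsing the nonterminal, and otherwise fall back to the reparsed stream. The following is a minimal standalone sketch of that decision only; it is not part of this commit, and the `Tok` enum and the plain-equality `probably_equal` are hypothetical stand-ins for the rustc types.

// Illustrative sketch only; `Tok` and `probably_equal` are stand-ins, not rustc types.
#[derive(Clone, Debug, PartialEq)]
enum Tok {
    Ident(&'static str),
    Comma,
}

// Placeholder for `tokenstream_probably_equal_for_proc_macro`: plain equality here.
fn probably_equal(a: &[Tok], b: &[Tok]) -> bool {
    a == b
}

// Keep the original tokens (real spans) when the round trip agrees; otherwise
// use the reparsed tokens (synthetic spans).
fn choose_tokens(original: Option<Vec<Tok>>, reparsed: Vec<Tok>) -> Vec<Tok> {
    match original {
        Some(orig) if probably_equal(&orig, &reparsed) => orig,
        _ => reparsed,
    }
}

fn main() {
    let original = Some(vec![Tok::Ident("x"), Tok::Comma]);
    let reparsed = vec![Tok::Ident("x"), Tok::Comma];
    assert_eq!(choose_tokens(original, reparsed.clone()), reparsed);
}
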
@@ -389,11 +389,7 @@ fn prepend_attrs(
 //
 // This is otherwise the same as `eq_unspanned`, only recursing with a
 // different method.
-pub fn tokenstream_probably_equal_for_proc_macro(
-    first: &TokenStream,
-    other: &TokenStream,
-    sess: &ParseSess,
-) -> bool {
+pub fn tokenstream_probably_equal_for_proc_macro(first: &TokenStream, other: &TokenStream) -> bool {
     // When checking for `probably_eq`, we ignore certain tokens that aren't
     // preserved in the AST. Because they are not preserved, the pretty
     // printer arbitrarily adds or removes them when printing as token
@@ -421,83 +417,10 @@ pub fn tokenstream_probably_equal_for_proc_macro(
         true
     }
 
-    // When comparing two `TokenStream`s, we ignore the `IsJoint` information.
-    //
-    // However, `rustc_parse::lexer::tokentrees::TokenStreamBuilder` will
-    // use `Token.glue` on adjacent tokens with the proper `IsJoint`.
-    // Since we are ignoreing `IsJoint`, a 'glued' token (e.g. `BinOp(Shr)`)
-    // and its 'split'/'unglued' compoenents (e.g. `Gt, Gt`) are equivalent
-    // when determining if two `TokenStream`s are 'probably equal'.
-    //
-    // Therefore, we use `break_two_token_op` to convert all tokens
-    // to the 'unglued' form (if it exists). This ensures that two
-    // `TokenStream`s which differ only in how their tokens are glued
-    // will be considered 'probably equal', which allows us to keep spans.
-    //
-    // This is important when the original `TokenStream` contained
-    // extra spaces (e.g. `f :: < Vec < _ > > ( ) ;'). These extra spaces
-    // will be omitted when we pretty-print, which can cause the original
-    // and reparsed `TokenStream`s to differ in the assignment of `IsJoint`,
-    // leading to some tokens being 'glued' together in one stream but not
-    // the other. See #68489 for more details.
-    fn break_tokens(tree: TokenTree) -> impl Iterator<Item = TokenTree> {
-        // In almost all cases, we should have either zero or one levels
-        // of 'unglueing'. However, in some unusual cases, we may need
-        // to iterate breaking tokens mutliple times. For example:
-        // '[BinOpEq(Shr)] => [Gt, Ge] -> [Gt, Gt, Eq]'
-        let mut token_trees: SmallVec<[_; 2]>;
-        if let TokenTree::Token(token) = &tree {
-            let mut out = SmallVec::<[_; 2]>::new();
-            out.push(token.clone());
-            // Iterate to fixpoint:
-            // * We start off with 'out' containing our initial token, and `temp` empty
-            // * If we are able to break any tokens in `out`, then `out` will have
-            //   at least one more element than 'temp', so we will try to break tokens
-            //   again.
-            // * If we cannot break any tokens in 'out', we are done
-            loop {
-                let mut temp = SmallVec::<[_; 2]>::new();
-                let mut changed = false;
-
-                for token in out.into_iter() {
-                    if let Some((first, second)) = token.kind.break_two_token_op() {
-                        temp.push(Token::new(first, DUMMY_SP));
-                        temp.push(Token::new(second, DUMMY_SP));
-                        changed = true;
-                    } else {
-                        temp.push(token);
-                    }
-                }
-                out = temp;
-                if !changed {
-                    break;
-                }
-            }
-            token_trees = out.into_iter().map(|t| TokenTree::Token(t)).collect();
-            if token_trees.len() != 1 {
-                debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
-            }
-        } else {
-            token_trees = SmallVec::new();
-            token_trees.push(tree);
-        }
-        token_trees.into_iter()
-    }
-
-    let expand_nt = |tree: TokenTree| {
-        if let TokenTree::Token(Token { kind: TokenKind::Interpolated(nt), span }) = &tree {
-            nt_to_tokenstream(nt, sess, *span).into_trees()
-        } else {
-            TokenStream::new(vec![(tree, IsJoint::NonJoint)]).into_trees()
-        }
-    };
-
-    // Break tokens after we expand any nonterminals, so that we break tokens
-    // that are produced as a result of nonterminal expansion.
-    let mut t1 = first.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
-    let mut t2 = other.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
+    let mut t1 = first.trees().filter(semantic_tree);
+    let mut t2 = other.trees().filter(semantic_tree);
     for (t1, t2) in t1.by_ref().zip(t2.by_ref()) {
-        if !tokentree_probably_equal_for_proc_macro(&t1, &t2, sess) {
+        if !tokentree_probably_equal_for_proc_macro(&t1, &t2) {
             return false;
         }
     }
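
The comment block removed in the hunk above explains why two streams that differ only in token "gluing" had to count as probably equal: the lexer may produce one `>>` token where reparsing produces `>` `>`, and splitting can need more than one round, since `>>=` splits to `>` `>=`, which splits again to `>` `>` `=`. Below is a self-contained sketch of that fixpoint splitting, not part of this commit, using a toy token enum and a toy `break_two` in place of rustc's `TokenKind` and `break_two_token_op`.

// Toy stand-ins; not rustc's `TokenKind` or `break_two_token_op`.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Tok {
    Gt,    // `>`
    Eq,    // `=`
    Ge,    // `>=`
    Shr,   // `>>`
    ShrEq, // `>>=`
}

// Split a glued token into its two pieces, if it has any.
fn break_two(tok: Tok) -> Option<(Tok, Tok)> {
    match tok {
        Tok::Ge => Some((Tok::Gt, Tok::Eq)),
        Tok::Shr => Some((Tok::Gt, Tok::Gt)),
        Tok::ShrEq => Some((Tok::Gt, Tok::Ge)),
        _ => None,
    }
}

// Iterate to a fixpoint, like the removed `break_tokens` helper:
// `>>=` -> [`>`, `>=`] -> [`>`, `>`, `=`].
fn break_all(tok: Tok) -> Vec<Tok> {
    let mut out = vec![tok];
    loop {
        let mut next = Vec::new();
        let mut changed = false;
        for t in out {
            if let Some((a, b)) = break_two(t) {
                next.push(a);
                next.push(b);
                changed = true;
            } else {
                next.push(t);
            }
        }
        out = next;
        if !changed {
            return out;
        }
    }
}

fn main() {
    assert_eq!(break_all(Tok::ShrEq), vec![Tok::Gt, Tok::Gt, Tok::Eq]);
    // Streams that differ only in gluing agree once every token is split.
    assert_eq!(break_all(Tok::Shr), vec![Tok::Gt, Tok::Gt]);
}
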
@@ -556,29 +479,25 @@ crate fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool
             b == d && (a == c || a == kw::DollarCrate || c == kw::DollarCrate)
         }
 
-        // Expanded by `tokenstream_probably_equal_for_proc_macro`
-        (&Interpolated(_), &Interpolated(_)) => unreachable!(),
+        (&Interpolated(_), &Interpolated(_)) => false,
 
         _ => panic!("forgot to add a token?"),
     }
 }
 
+
 // See comments in `Nonterminal::to_tokenstream` for why we care about
 // *probably* equal here rather than actual equality
 //
 // This is otherwise the same as `eq_unspanned`, only recursing with a
 // different method.
-pub fn tokentree_probably_equal_for_proc_macro(
-    first: &TokenTree,
-    other: &TokenTree,
-    sess: &ParseSess,
-) -> bool {
+pub fn tokentree_probably_equal_for_proc_macro(first: &TokenTree, other: &TokenTree) -> bool {
     match (first, other) {
         (TokenTree::Token(token), TokenTree::Token(token2)) => {
             token_probably_equal_for_proc_macro(token, token2)
         }
         (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
-            delim == delim2 && tokenstream_probably_equal_for_proc_macro(&tts, &tts2, sess)
+            delim == delim2 && tokenstream_probably_equal_for_proc_macro(&tts, &tts2)
         }
         _ => false,
     }
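
Taken together, the comparison these functions implement after this change is a plain mutual recursion: two streams are probably equal when their (semantic) trees pair up one for one, single tokens are compared by kind with spans ignored, and delimited groups are compared by delimiter and then recursively by their contents. The sketch below shows only that shape with toy types; it is not the rustc `TokenStream`/`TokenTree` API, and it omits the non-semantic-token filtering that the real code performs.

// Toy model of the comparison shape; not the rustc types.
#[derive(PartialEq)]
enum Kind {
    Ident(&'static str),
    Comma,
}

enum Tree {
    // A single token: a kind plus a span that the comparison ignores.
    Token { kind: Kind, span: (u32, u32) },
    // A delimited group: delimiter plus nested trees.
    Delimited(char, Vec<Tree>),
}

// Stream-level comparison: same length, and every paired tree matches.
fn stream_probably_equal(a: &[Tree], b: &[Tree]) -> bool {
    a.len() == b.len() && a.iter().zip(b).all(|(x, y)| tree_probably_equal(x, y))
}

// Tree-level comparison: tokens by kind (spans ignored), groups by delimiter
// and then recursively by their contents.
fn tree_probably_equal(a: &Tree, b: &Tree) -> bool {
    match (a, b) {
        (Tree::Token { kind: k1, .. }, Tree::Token { kind: k2, .. }) => k1 == k2,
        (Tree::Delimited(d1, ts1), Tree::Delimited(d2, ts2)) => {
            d1 == d2 && stream_probably_equal(ts1, ts2)
        }
        _ => false,
    }
}

fn main() {
    // `(x,)` written twice with different spans: still probably equal.
    let a = [Tree::Delimited(
        '(',
        vec![
            Tree::Token { kind: Kind::Ident("x"), span: (0, 1) },
            Tree::Token { kind: Kind::Comma, span: (1, 2) },
        ],
    )];
    let b = [Tree::Delimited(
        '(',
        vec![
            Tree::Token { kind: Kind::Ident("x"), span: (10, 11) },
            Tree::Token { kind: Kind::Comma, span: (11, 12) },
        ],
    )];
    assert!(stream_probably_equal(&a, &b));
}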