 //! This quasiquoter uses macros 2.0 hygiene to reliably access
 //! items from `proc_macro`, to build a `proc_macro::TokenStream`.
 
-use crate::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+use crate::{
+    Delimiter, Group, Ident, Literal, Punct, Spacing, Span, ToTokens, TokenStream, TokenTree,
+};
 
 macro_rules! minimal_quote_tt {
     (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, minimal_quote!($($t)*)) };
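The newly imported `ToTokens` trait is what the rest of this diff builds on: a value appends itself onto an existing `TokenStream` instead of being collected into a fresh one. A minimal sketch of the shape used here, covering only the method this file calls (the real trait in `proc_macro` is unstable and richer than this):

```rust
// Sketch only: mirrors the single method used throughout this file,
// `ToTokens::to_tokens(&value, &mut stream)`.
pub trait ToTokens {
    /// Append `self` to the end of `tokens`.
    fn to_tokens(&self, tokens: &mut TokenStream);
}
```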
@@ -24,16 +26,15 @@ macro_rules! minimal_quote_tt {
 macro_rules! minimal_quote_ts {
     ((@ $($t:tt)*)) => { $($t)* };
     (::) => {
-        [
-            TokenTree::from(Punct::new(':', Spacing::Joint)),
-            TokenTree::from(Punct::new(':', Spacing::Alone)),
-        ].iter()
-            .cloned()
-            .map(|mut x| {
-                x.set_span(Span::def_site());
-                x
-            })
-            .collect::<TokenStream>()
+        {
+            let mut c = (
+                TokenTree::from(Punct::new(':', Spacing::Joint)),
+                TokenTree::from(Punct::new(':', Spacing::Alone))
+            );
+            c.0.set_span(Span::def_site());
+            c.1.set_span(Span::def_site());
+            [c.0, c.1].into_iter().collect::<TokenStream>()
+        }
     };
     ($t:tt) => { TokenTree::from(minimal_quote_tt!($t)) };
 }
@@ -47,11 +48,13 @@ macro_rules! minimal_quote_ts {
 /// Note: supported tokens are a subset of the real `quote!`, but
 /// unquoting is different: instead of `$x`, this uses `(@ expr)`.
 macro_rules! minimal_quote {
-    () => { TokenStream::new() };
     ($($t:tt)*) => {
-        [
-            $(TokenStream::from(minimal_quote_ts!($t)),)*
-        ].iter().cloned().collect::<TokenStream>()
+        {
+            #[allow(unused_mut)] // In case the expansion is empty
+            let mut ts = TokenStream::new();
+            $(ToTokens::to_tokens(&minimal_quote_ts!($t), &mut ts);)*
+            ts
+        }
     };
 }
 
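With this rewrite, `minimal_quote!` threads every quoted token through one mutable `ts` instead of collecting an array of sub-streams, and the empty case falls out of the same arm (hence the `#[allow(unused_mut)]`). A standalone sketch of the same accumulation pattern, using the `proc_macro2` and `quote` crates as stand-ins since `proc_macro::ToTokens` is unstable outside this crate:

```rust
use proc_macro2::{Ident, Punct, Spacing, Span, TokenStream};
use quote::ToTokens;

fn build() -> TokenStream {
    // Mirrors the expansion `$(ToTokens::to_tokens(&minimal_quote_ts!($t), &mut ts);)*`:
    // each piece appends itself to one shared stream.
    let mut ts = TokenStream::new();
    Ident::new("foo", Span::call_site()).to_tokens(&mut ts);
    Punct::new('!', Spacing::Alone).to_tokens(&mut ts);
    ts
}
```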
@@ -66,48 +69,56 @@ pub fn quote(stream: TokenStream) -> TokenStream {
     }
     let proc_macro_crate = minimal_quote!(crate);
     let mut after_dollar = false;
-    let tokens = stream
-        .into_iter()
-        .filter_map(|tree| {
-            if after_dollar {
-                after_dollar = false;
-                match tree {
-                    TokenTree::Ident(_) => {
-                        return Some(minimal_quote!(Into::<crate::TokenStream>::into(
-                            Clone::clone(&(@ tree))),));
-                    }
-                    TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
-                    _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
-                }
-            } else if let TokenTree::Punct(ref tt) = tree {
-                if tt.as_char() == '$' {
-                    after_dollar = true;
-                    return None;
+
+    let mut tokens = crate::TokenStream::new();
+    for tree in stream {
+        if after_dollar {
+            after_dollar = false;
+            match tree {
+                TokenTree::Ident(_) => {
+                    minimal_quote!(crate::ToTokens::to_tokens(&(@ tree), &mut ts);)
+                        .to_tokens(&mut tokens);
+                    continue;
                 }
+                TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
+                _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
             }
+        } else if let TokenTree::Punct(ref tt) = tree {
+            if tt.as_char() == '$' {
+                after_dollar = true;
+                continue;
+            }
+        }
 
-            Some(minimal_quote!(crate::TokenStream::from((@ match tree {
-                TokenTree::Punct(tt) => minimal_quote!(crate::TokenTree::Punct(crate::Punct::new(
+        match tree {
+            TokenTree::Punct(tt) => {
+                minimal_quote!(crate::ToTokens::to_tokens(&crate::TokenTree::Punct(crate::Punct::new(
                     (@ TokenTree::from(Literal::character(tt.as_char()))),
                     (@ match tt.spacing() {
                         Spacing::Alone => minimal_quote!(crate::Spacing::Alone),
                         Spacing::Joint => minimal_quote!(crate::Spacing::Joint),
                     }),
-                ))),
-                TokenTree::Group(tt) => minimal_quote!(crate::TokenTree::Group(crate::Group::new(
+                )), &mut ts);)
+            }
+            TokenTree::Group(tt) => {
+                minimal_quote!(crate::ToTokens::to_tokens(&crate::TokenTree::Group(crate::Group::new(
                     (@ match tt.delimiter() {
                         Delimiter::Parenthesis => minimal_quote!(crate::Delimiter::Parenthesis),
                         Delimiter::Brace => minimal_quote!(crate::Delimiter::Brace),
                         Delimiter::Bracket => minimal_quote!(crate::Delimiter::Bracket),
                         Delimiter::None => minimal_quote!(crate::Delimiter::None),
                     }),
                     (@ quote(tt.stream())),
-                ))),
-                TokenTree::Ident(tt) => minimal_quote!(crate::TokenTree::Ident(crate::Ident::new(
+                )), &mut ts);)
+            }
+            TokenTree::Ident(tt) => {
+                minimal_quote!(crate::ToTokens::to_tokens(&crate::TokenTree::Ident(crate::Ident::new(
                     (@ TokenTree::from(Literal::string(&tt.to_string()))),
                     (@ quote_span(proc_macro_crate.clone(), tt.span())),
-                ))),
-                TokenTree::Literal(tt) => minimal_quote!(crate::TokenTree::Literal({
+                )), &mut ts);)
+            }
+            TokenTree::Literal(tt) => {
+                minimal_quote!(crate::ToTokens::to_tokens(&crate::TokenTree::Literal({
                     let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string())))
                         .parse::<crate::TokenStream>()
                         .unwrap()
@@ -120,16 +131,22 @@ pub fn quote(stream: TokenStream) -> TokenStream {
                     } else {
                         unreachable!()
                     }
-                }))
-            })),))
-        })
-        .collect::<TokenStream>();
-
+                }), &mut ts);)
+            }
+        }
+        .to_tokens(&mut tokens);
+    }
     if after_dollar {
         panic!("unexpected trailing `$` in `quote!`");
     }
 
-    minimal_quote!([(@ tokens)].iter().cloned().collect::<crate::TokenStream>())
+    minimal_quote! {
+        {
+            let mut ts = crate::TokenStream::new();
+            (@ tokens)
+            ts
+        }
+    }
 }
 
 /// Quote a `Span` into a `TokenStream`.
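Taken together, `quote()` now emits a block that declares one `ts`, lets the per-token statements spliced in via `(@ tokens)` push into it, and returns it, replacing the old `iter().cloned().collect()` chains. Roughly the shape of the generated code for an input of two idents, again sketched with `proc_macro2`/`quote` stand-ins rather than the real (unstable) `proc_macro` paths:

```rust
use proc_macro2::{Ident, Span, TokenStream, TokenTree};
use quote::ToTokens;

// Illustrative: spans are call_site here, whereas the real expansion
// reconstructs each token's span via `quote_span`.
fn generated_shape() -> TokenStream {
    let mut ts = TokenStream::new();
    TokenTree::Ident(Ident::new("a", Span::call_site())).to_tokens(&mut ts);
    TokenTree::Ident(Ident::new("b", Span::call_site())).to_tokens(&mut ts);
    ts
}
```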