@@ -216,6 +216,30 @@ struct TokenCursorFrame {
216216 open_delim : bool ,
217217 tree_cursor : tokenstream:: Cursor ,
218218 close_delim : bool ,
219+ last_token : LastToken ,
220+ }
221+
/// This is used in `TokenCursorFrame` above to track tokens that are consumed
/// by the parser, and then that's transitively used to record the tokens that
/// each parse AST item is created with.
///
/// Right now this has two states, either collecting tokens or not collecting
/// tokens. If we're collecting tokens we just save everything off into a local
/// `Vec`. This should eventually though likely save tokens from the original
/// token stream and just use slicing of token streams to avoid creation of a
/// whole new vector.
///
/// The second state is where we're passively not recording tokens, but the last
/// token is still tracked for when we want to start recording tokens. This
/// "last token" means that when we start recording tokens we'll want to ensure
/// that this, the first token, is included in the output.
///
/// You can find some more example usage of this in the `collect_tokens` method
/// on the parser.
#[derive(Clone)]
enum LastToken {
    // Actively buffering: every token the cursor yields is appended here.
    Collecting(Vec<TokenTree>),
    // Not recording; remembers only the most recently yielded token (if any)
    // so a later switch to `Collecting` can include it as its first token.
    Was(Option<TokenTree>),
}
220244
221245impl TokenCursorFrame {
@@ -226,6 +250,7 @@ impl TokenCursorFrame {
226250 open_delim : delimited. delim == token:: NoDelim ,
227251 tree_cursor : delimited. stream ( ) . into_trees ( ) ,
228252 close_delim : delimited. delim == token:: NoDelim ,
253+ last_token : LastToken :: Was ( None ) ,
229254 }
230255 }
231256}
@@ -250,6 +275,11 @@ impl TokenCursor {
250275 return TokenAndSpan { tok : token:: Eof , sp : syntax_pos:: DUMMY_SP }
251276 } ;
252277
278+ match self . frame . last_token {
279+ LastToken :: Collecting ( ref mut v) => v. push ( tree. clone ( ) ) ,
280+ LastToken :: Was ( ref mut t) => * t = Some ( tree. clone ( ) ) ,
281+ }
282+
253283 match tree {
254284 TokenTree :: Token ( sp, tok) => return TokenAndSpan { tok : tok, sp : sp } ,
255285 TokenTree :: Delimited ( sp, ref delimited) => {
@@ -1209,7 +1239,20 @@ impl<'a> Parser<'a> {
12091239 /// Parse the items in a trait declaration
12101240 pub fn parse_trait_item ( & mut self , at_end : & mut bool ) -> PResult < ' a , TraitItem > {
12111241 maybe_whole ! ( self , NtTraitItem , |x| x) ;
1212- let mut attrs = self . parse_outer_attributes ( ) ?;
1242+ let attrs = self . parse_outer_attributes ( ) ?;
1243+ let ( mut item, tokens) = self . collect_tokens ( |this| {
1244+ this. parse_trait_item_ ( at_end, attrs)
1245+ } ) ?;
1246+ // See `parse_item` for why this clause is here.
1247+ if !item. attrs . iter ( ) . any ( |attr| attr. style == AttrStyle :: Inner ) {
1248+ item. tokens = Some ( tokens) ;
1249+ }
1250+ Ok ( item)
1251+ }
1252+
1253+ fn parse_trait_item_ ( & mut self ,
1254+ at_end : & mut bool ,
1255+ mut attrs : Vec < Attribute > ) -> PResult < ' a , TraitItem > {
12131256 let lo = self . span ;
12141257
12151258 let ( name, node) = if self . eat_keyword ( keywords:: Type ) {
@@ -1304,6 +1347,7 @@ impl<'a> Parser<'a> {
13041347 attrs : attrs,
13051348 node : node,
13061349 span : lo. to ( self . prev_span ) ,
1350+ tokens : None ,
13071351 } )
13081352 }
13091353
@@ -4653,7 +4697,7 @@ impl<'a> Parser<'a> {
46534697 node : node,
46544698 vis : vis,
46554699 span : span,
4656- tokens : None , // TODO: fill this in
4700+ tokens : None ,
46574701 } )
46584702 }
46594703
@@ -4709,8 +4753,21 @@ impl<'a> Parser<'a> {
47094753 /// Parse an impl item.
47104754 pub fn parse_impl_item ( & mut self , at_end : & mut bool ) -> PResult < ' a , ImplItem > {
47114755 maybe_whole ! ( self , NtImplItem , |x| x) ;
4756+ let attrs = self . parse_outer_attributes ( ) ?;
4757+ let ( mut item, tokens) = self . collect_tokens ( |this| {
4758+ this. parse_impl_item_ ( at_end, attrs)
4759+ } ) ?;
4760+
4761+ // See `parse_item` for why this clause is here.
4762+ if !item. attrs . iter ( ) . any ( |attr| attr. style == AttrStyle :: Inner ) {
4763+ item. tokens = Some ( tokens) ;
4764+ }
4765+ Ok ( item)
4766+ }
47124767
4713- let mut attrs = self . parse_outer_attributes ( ) ?;
4768+ fn parse_impl_item_ ( & mut self ,
4769+ at_end : & mut bool ,
4770+ mut attrs : Vec < Attribute > ) -> PResult < ' a , ImplItem > {
47144771 let lo = self . span ;
47154772 let vis = self . parse_visibility ( false ) ?;
47164773 let defaultness = self . parse_defaultness ( ) ?;
@@ -4742,7 +4799,8 @@ impl<'a> Parser<'a> {
47424799 vis : vis,
47434800 defaultness : defaultness,
47444801 attrs : attrs,
4745- node : node
4802+ node : node,
4803+ tokens : None ,
47464804 } )
47474805 }
47484806
@@ -6018,9 +6076,71 @@ impl<'a> Parser<'a> {
60186076 Ok ( None )
60196077 }
60206078
    /// Runs `f`, recording every token pulled off the token cursor while it
    /// executes, and returns `f`'s result paired with those tokens as a
    /// `TokenStream`.
    ///
    /// This is how items remember the exact tokens they were parsed from, so
    /// they can later be turned back into a token stream (e.g. for procedural
    /// macros). Nested (recursive) collection is not supported yet and will
    /// panic.
    fn collect_tokens<F, R>(&mut self, f: F) -> PResult<'a, (R, TokenStream)>
        where F: FnOnce(&mut Self) -> PResult<'a, R>
    {
        // Record all tokens we parse when parsing this item.
        let mut tokens = Vec::new();
        match self.token_cursor.frame.last_token {
            LastToken::Collecting(_) => {
                panic!("cannot collect tokens recursively yet")
            }
            // Seed the buffer with the most recent token: it has already been
            // pulled off the cursor but belongs to the item we're collecting.
            LastToken::Was(ref mut last) => tokens.extend(last.take()),
        }
        self.token_cursor.frame.last_token = LastToken::Collecting(tokens);
        let prev = self.token_cursor.stack.len();
        let ret = f(self);
        // `f` may have pushed delimited frames onto the cursor stack. If it
        // returned while frames are still pushed, our `Collecting` state lives
        // in the frame that was current when we started (saved at depth
        // `prev`), not in the cursor's current frame.
        let last_token = if self.token_cursor.stack.len() == prev {
            &mut self.token_cursor.frame.last_token
        } else {
            &mut self.token_cursor.stack[prev].last_token
        };
        let mut tokens = match *last_token {
            LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()),
            // We installed `Collecting` above, so finding `Was` is a bug.
            LastToken::Was(_) => panic!("our vector went away?"),
        };

        // If we're not at EOF our current token wasn't actually consumed by
        // `f`, but it'll still be in our list that we pulled out. In that case
        // put it back.
        if self.token == token::Eof {
            *last_token = LastToken::Was(None);
        } else {
            *last_token = LastToken::Was(tokens.pop());
        }

        Ok((ret?, tokens.into_iter().collect()))
    }
6114+
    /// Parses a single item (or `None` if no item is present), capturing the
    /// tokens it was parsed from and storing them in the item's `tokens` field
    /// — unless the item carries inner attributes (see the comment below).
    pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> {
        // Outer attributes are parsed *before* collection starts, so they are
        // deliberately excluded from the recorded tokens.
        let attrs = self.parse_outer_attributes()?;

        let (ret, tokens) = self.collect_tokens(|this| {
            this.parse_item_(attrs, true, false)
        })?;

        // Once we've parsed an item and recorded the tokens we got while
        // parsing we may want to store `tokens` into the item we're about to
        // return. Note, though, that we specifically didn't capture tokens
        // related to outer attributes. The `tokens` field here may later be
        // used with procedural macros to convert this item back into a token
        // stream, but during expansion we may be removing attributes as we go
        // along.
        //
        // If we've got inner attributes then the `tokens` we've got above holds
        // these inner attributes. If an inner attribute is expanded we won't
        // actually remove it from the token stream, so we'll just keep yielding
        // it (bad!). To work around this case for now we just avoid recording
        // `tokens` if we detect any inner attributes. This should help keep
        // expansion correct, but we should fix this bug one day!
        Ok(ret.map(|item| {
            item.map(|mut i| {
                if !i.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
                    i.tokens = Some(tokens);
                }
                i
            })
        }))
    }
60256145
60266146 fn parse_path_list_items ( & mut self ) -> PResult < ' a , Vec < ast:: PathListItem > > {
0 commit comments