@@ -288,8 +288,8 @@ struct TokenCursorFrame {
288
288
/// on the parser.
289
289
#[ derive( Clone ) ]
290
290
enum LastToken {
291
- Collecting ( Vec < TokenTree > ) ,
292
- Was ( Option < TokenTree > ) ,
291
+ Collecting ( Vec < TokenStream > ) ,
292
+ Was ( Option < TokenStream > ) ,
293
293
}
294
294
295
295
impl TokenCursorFrame {
@@ -326,8 +326,8 @@ impl TokenCursor {
326
326
} ;
327
327
328
328
match self . frame . last_token {
329
- LastToken :: Collecting ( ref mut v) => v. push ( tree. clone ( ) ) ,
330
- LastToken :: Was ( ref mut t) => * t = Some ( tree. clone ( ) ) ,
329
+ LastToken :: Collecting ( ref mut v) => v. push ( tree. clone ( ) . into ( ) ) ,
330
+ LastToken :: Was ( ref mut t) => * t = Some ( tree. clone ( ) . into ( ) ) ,
331
331
}
332
332
333
333
match tree {
@@ -6723,11 +6723,49 @@ impl<'a> Parser<'a> {
6723
6723
}
6724
6724
}
6725
6725
6726
+ fn parse_item_ (
6727
+ & mut self ,
6728
+ attrs : Vec < Attribute > ,
6729
+ macros_allowed : bool ,
6730
+ attributes_allowed : bool ,
6731
+ ) -> PResult < ' a , Option < P < Item > > > {
6732
+ let ( ret, tokens) = self . collect_tokens ( |this| {
6733
+ this. parse_item_implementation ( attrs, macros_allowed, attributes_allowed)
6734
+ } ) ?;
6735
+
6736
+ // Once we've parsed an item and recorded the tokens we got while
6737
+ // parsing we may want to store `tokens` into the item we're about to
6738
+ // return. Note, though, that we specifically didn't capture tokens
6739
+ // related to outer attributes. The `tokens` field here may later be
6740
+ // used with procedural macros to convert this item back into a token
6741
+ // stream, but during expansion we may be removing attributes as we go
6742
+ // along.
6743
+ //
6744
+ // If we've got inner attributes then the `tokens` we've got above holds
6745
+ // these inner attributes. If an inner attribute is expanded we won't
6746
+ // actually remove it from the token stream, so we'll just keep yielding
6747
+ // it (bad!). To work around this case for now we just avoid recording
6748
+ // `tokens` if we detect any inner attributes. This should help keep
6749
+ // expansion correct, but we should fix this bug one day!
6750
+ Ok ( ret. map ( |item| {
6751
+ item. map ( |mut i| {
6752
+ if !i. attrs . iter ( ) . any ( |attr| attr. style == AttrStyle :: Inner ) {
6753
+ i. tokens = Some ( tokens) ;
6754
+ }
6755
+ i
6756
+ } )
6757
+ } ) )
6758
+ }
6759
+
6726
6760
/// Parse one of the items allowed by the flags.
6727
6761
/// NB: this function no longer parses the items inside an
6728
6762
/// extern crate.
6729
- fn parse_item_ ( & mut self , attrs : Vec < Attribute > ,
6730
- macros_allowed : bool , attributes_allowed : bool ) -> PResult < ' a , Option < P < Item > > > {
6763
+ fn parse_item_implementation (
6764
+ & mut self ,
6765
+ attrs : Vec < Attribute > ,
6766
+ macros_allowed : bool ,
6767
+ attributes_allowed : bool ,
6768
+ ) -> PResult < ' a , Option < P < Item > > > {
6731
6769
maybe_whole ! ( self , NtItem , |item| {
6732
6770
let mut item = item. into_inner( ) ;
6733
6771
let mut attrs = attrs;
@@ -7260,12 +7298,15 @@ impl<'a> Parser<'a> {
7260
7298
{
7261
7299
// Record all tokens we parse when parsing this item.
7262
7300
let mut tokens = Vec :: new ( ) ;
7263
- match self . token_cursor . frame . last_token {
7264
- LastToken :: Collecting ( _ ) => {
7265
- panic ! ( "cannot collect tokens recursively yet" )
7301
+ let prev_collecting = match self . token_cursor . frame . last_token {
7302
+ LastToken :: Collecting ( ref mut list ) => {
7303
+ Some ( mem :: replace ( list , Vec :: new ( ) ) )
7266
7304
}
7267
- LastToken :: Was ( ref mut last) => tokens. extend ( last. take ( ) ) ,
7268
- }
7305
+ LastToken :: Was ( ref mut last) => {
7306
+ tokens. extend ( last. take ( ) ) ;
7307
+ None
7308
+ }
7309
+ } ;
7269
7310
self . token_cursor . frame . last_token = LastToken :: Collecting ( tokens) ;
7270
7311
let prev = self . token_cursor . stack . len ( ) ;
7271
7312
let ret = f ( self ) ;
@@ -7274,52 +7315,44 @@ impl<'a> Parser<'a> {
7274
7315
} else {
7275
7316
& mut self . token_cursor . stack [ prev] . last_token
7276
7317
} ;
7277
- let mut tokens = match * last_token {
7318
+
7319
+ // Pull out the tokens that we've collected from the call to `f` above
7320
+ let mut collected_tokens = match * last_token {
7278
7321
LastToken :: Collecting ( ref mut v) => mem:: replace ( v, Vec :: new ( ) ) ,
7279
7322
LastToken :: Was ( _) => panic ! ( "our vector went away?" ) ,
7280
7323
} ;
7281
7324
7282
7325
// If we're not at EOF our current token wasn't actually consumed by
7283
7326
// `f`, but it'll still be in our list that we pulled out. In that case
7284
7327
// put it back.
7285
- if self . token == token:: Eof {
7286
- * last_token = LastToken :: Was ( None ) ;
7328
+ let extra_token = if self . token != token:: Eof {
7329
+ collected_tokens . pop ( )
7287
7330
} else {
7288
- * last_token = LastToken :: Was ( tokens. pop ( ) ) ;
7331
+ None
7332
+ } ;
7333
+
7334
+ // If we were previously collecting tokens, then this was a recursive
7335
+ // call. In that case we need to record all the tokens we collected in
7336
+ // our parent list as well. To do that we push a clone of our stream
7337
+ // onto the previous list.
7338
+ let stream = collected_tokens. into_iter ( ) . collect :: < TokenStream > ( ) ;
7339
+ match prev_collecting {
7340
+ Some ( mut list) => {
7341
+ list. push ( stream. clone ( ) ) ;
7342
+ list. extend ( extra_token) ;
7343
+ * last_token = LastToken :: Collecting ( list) ;
7344
+ }
7345
+ None => {
7346
+ * last_token = LastToken :: Was ( extra_token) ;
7347
+ }
7289
7348
}
7290
7349
7291
- Ok ( ( ret?, tokens . into_iter ( ) . collect ( ) ) )
7350
+ Ok ( ( ret?, stream ) )
7292
7351
}
7293
7352
7294
7353
pub fn parse_item ( & mut self ) -> PResult < ' a , Option < P < Item > > > {
7295
7354
let attrs = self . parse_outer_attributes ( ) ?;
7296
-
7297
- let ( ret, tokens) = self . collect_tokens ( |this| {
7298
- this. parse_item_ ( attrs, true , false )
7299
- } ) ?;
7300
-
7301
- // Once we've parsed an item and recorded the tokens we got while
7302
- // parsing we may want to store `tokens` into the item we're about to
7303
- // return. Note, though, that we specifically didn't capture tokens
7304
- // related to outer attributes. The `tokens` field here may later be
7305
- // used with procedural macros to convert this item back into a token
7306
- // stream, but during expansion we may be removing attributes as we go
7307
- // along.
7308
- //
7309
- // If we've got inner attributes then the `tokens` we've got above holds
7310
- // these inner attributes. If an inner attribute is expanded we won't
7311
- // actually remove it from the token stream, so we'll just keep yielding
7312
- // it (bad!). To work around this case for now we just avoid recording
7313
- // `tokens` if we detect any inner attributes. This should help keep
7314
- // expansion correct, but we should fix this bug one day!
7315
- Ok ( ret. map ( |item| {
7316
- item. map ( |mut i| {
7317
- if !i. attrs . iter ( ) . any ( |attr| attr. style == AttrStyle :: Inner ) {
7318
- i. tokens = Some ( tokens) ;
7319
- }
7320
- i
7321
- } )
7322
- } ) )
7355
+ self . parse_item_ ( attrs, true , false )
7323
7356
}
7324
7357
7325
7358
/// `::{` or `::*`
0 commit comments