@@ -17,7 +17,7 @@ pub use path::PathStyle;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, DelimToken, Token, TokenKind};
 use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
-use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
 use rustc_ast::DUMMY_NODE_ID;
 use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
 use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
@@ -132,6 +132,28 @@ struct TokenCursor {
     // Counts the number of calls to `next` or `next_desugared`,
     // depending on whether `desugar_doc_comments` is set.
     num_next_calls: usize,
+    // During parsing, we may sometimes need to 'unglue' a
+    // glued token into two component tokens
+    // (e.g. '>>' into '>' and '>'), so that the parser
+    // can consume them one at a time. This process
+    // bypasses the normal capturing mechanism
+    // (e.g. `num_next_calls` will not be incremented),
+    // since the 'unglued' tokens do not exist in
+    // the original `TokenStream`.
+    //
+    // If we end up consuming both unglued tokens,
+    // then this is not an issue - we'll end up
+    // capturing the single 'glued' token.
+    //
+    // However, in certain circumstances, we may
+    // want to capture just the first 'unglued' token.
+    // For example, capturing the `Vec<u8>`
+    // in `Option<Vec<u8>>` requires us to unglue
+    // the trailing `>>` token. The `append_unglued_token`
+    // field is used to track this token - it gets
+    // appended to the captured stream when
+    // we evaluate a `LazyTokenStream`.
+    append_unglued_token: Option<TreeAndSpacing>,
 }

 #[derive(Clone)]
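The comment above is the heart of the change. As a rough illustration of why ungluing is needed at all, consider parsing `Option<Vec<u8>>`: the lexer emits the trailing `>>` as a single glued token, but the parser must consume one `>` to close `Vec<u8>` and a second to close the outer `Option<..>`. The sketch below models this with hypothetical stand-in types (`Tok`, `unglue`), not rustc's actual `Token`/`TokenKind` API:

```rust
// A minimal sketch of the ungluing idea, using made-up stand-in types.
#[derive(Debug, Clone, PartialEq)]
enum Tok {
    Shr, // the glued `>>` token produced by the lexer
    Gt,  // a single `>`
}

// Split a glued token into its two components, if it is glued.
// Loosely mirrors what `break_and_eat` does: the first half becomes
// the current token, the second half is left for the next consume.
fn unglue(tok: &Tok) -> Option<(Tok, Tok)> {
    match tok {
        Tok::Shr => Some((Tok::Gt, Tok::Gt)),
        _ => None,
    }
}

fn main() {
    // `Option<Vec<u8>>` ends in a glued `>>`.
    let trailing = Tok::Shr;
    let (first, second) = unglue(&trailing).expect("`>>` should unglue");
    // The first `>` closes `Vec<u8>`. If token capture stops here, the
    // captured stream must include this `>` even though it never existed
    // in the original `TokenStream` - hence `append_unglued_token`.
    assert_eq!(first, Tok::Gt);
    // The second `>` remains for the parser to consume later,
    // closing the outer `Option<..>`.
    assert_eq!(second, Tok::Gt);
}
```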
@@ -336,6 +358,7 @@ impl<'a> Parser<'a> {
                 stack: Vec::new(),
                 num_next_calls: 0,
                 desugar_doc_comments,
+                append_unglued_token: None,
             },
             desugar_doc_comments,
             unmatched_angle_bracket_count: 0,
@@ -359,6 +382,10 @@ impl<'a> Parser<'a> {
             self.token_cursor.next()
         };
         self.token_cursor.num_next_calls += 1;
+        // We've retrieved a token from the underlying
+        // cursor, so we no longer need to worry about
+        // an unglued token. See `break_and_eat` for more details.
+        self.token_cursor.append_unglued_token = None;
         if next.span.is_dummy() {
             // Tweak the location for better diagnostics, but keep syntactic context intact.
             next.span = fallback_span.with_ctxt(next.span.ctxt());
@@ -555,6 +582,14 @@ impl<'a> Parser<'a> {
             let first_span = self.sess.source_map().start_point(self.token.span);
             let second_span = self.token.span.with_lo(first_span.hi());
             self.token = Token::new(first, first_span);
+            // Keep track of this token - if we end token capturing now,
+            // we'll want to append this token to the captured stream.
+            //
+            // If we consume any additional tokens, then this token
+            // is not needed (we'll capture the entire 'glued' token),
+            // and `next_tok` will set this field to `None`.
+            self.token_cursor.append_unglued_token =
+                Some((TokenTree::Token(self.token.clone()), Spacing::Alone));
             // Use the spacing of the glued token as the spacing
             // of the unglued second token.
             self.bump_with((Token::new(second, second_span), self.token_spacing));
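Taken together, the two hunks above form a small set/clear protocol: `break_and_eat` records the unglued first half, and any subsequent `next_tok` call discards it, because from that point on the whole glued token will appear in the captured stream. A toy model of that lifecycle, using an illustrative `Cursor` type rather than rustc's real `TokenCursor`:

```rust
// Toy model of the set/clear protocol; names and types are illustrative.
#[derive(Debug)]
struct Cursor {
    append_unglued_token: Option<&'static str>,
}

impl Cursor {
    // Corresponds to `break_and_eat`: remember the unglued first half.
    fn unglue(&mut self, first_half: &'static str) {
        self.append_unglued_token = Some(first_half);
    }
    // Corresponds to `next_tok`: a real cursor advance clears the
    // pending half, since the glued token will be captured whole.
    fn advance(&mut self) {
        self.append_unglued_token = None;
    }
}

fn main() {
    let mut cursor = Cursor { append_unglued_token: None };

    // Case 1: capture ends right after ungluing `>>`, so the pending
    // `>` must later be appended to the captured stream.
    cursor.unglue(">");
    assert_eq!(cursor.append_unglued_token, Some(">"));

    // Case 2: the parser consumes more tokens, so the glued `>>` is
    // captured whole and the pending `>` is dropped.
    cursor.advance();
    assert_eq!(cursor.append_unglued_token, None);
}
```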
@@ -1230,6 +1265,7 @@ impl<'a> Parser<'a> {
             num_calls: usize,
             desugar_doc_comments: bool,
             trailing_semi: bool,
+            append_unglued_token: Option<TreeAndSpacing>,
         }
         impl CreateTokenStream for LazyTokenStreamImpl {
             fn create_token_stream(&self) -> TokenStream {
@@ -1253,12 +1289,18 @@ impl<'a> Parser<'a> {
                 }))
                 .take(num_calls);

-                make_token_stream(tokens)
+                make_token_stream(tokens, self.append_unglued_token.clone())
             }
             fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
                 if self.trailing_semi {
                     panic!("Called `add_trailing_semi` twice!");
                 }
+                if self.append_unglued_token.is_some() {
+                    panic!(
+                        "Cannot call `add_trailing_semi` when we have an unglued token {:?}",
+                        self.append_unglued_token
+                    );
+                }
                 let mut new = self.clone();
                 new.trailing_semi = true;
                 Box::new(new)
@@ -1271,6 +1313,7 @@ impl<'a> Parser<'a> {
             cursor_snapshot,
             desugar_doc_comments: self.desugar_doc_comments,
             trailing_semi: false,
+            append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
         Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
     }
@@ -1325,7 +1368,10 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &Pa
 /// Converts a flattened iterator of tokens (including open and close delimiter tokens)
 /// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
 /// of open and close delims.
-fn make_token_stream(tokens: impl Iterator<Item = (Token, Spacing)>) -> TokenStream {
+fn make_token_stream(
+    tokens: impl Iterator<Item = (Token, Spacing)>,
+    append_unglued_token: Option<TreeAndSpacing>,
+) -> TokenStream {
     #[derive(Debug)]
     struct FrameData {
         open: Span,
@@ -1348,14 +1394,17 @@ fn make_token_stream(tokens: impl Iterator<Item = (Token, Spacing)>) -> TokenStr
                 .inner
                 .push((delimited, Spacing::Alone));
             }
-            token => stack
-                .last_mut()
-                .expect("Bottom token frame is missing!")
-                .inner
-                .push((TokenTree::Token(token), spacing)),
+            token => {
+                stack
+                    .last_mut()
+                    .expect("Bottom token frame is missing!")
+                    .inner
+                    .push((TokenTree::Token(token), spacing));
+            }
         }
     }
-    let final_buf = stack.pop().expect("Missing final buf!");
+    let mut final_buf = stack.pop().expect("Missing final buf!");
+    final_buf.inner.extend(append_unglued_token);
     assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
     TokenStream::new(final_buf.inner)
 }
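The `final_buf.inner.extend(append_unglued_token)` call works because `Option<T>` implements `IntoIterator`, yielding zero or one items, so the pending token is appended exactly when one exists. A standalone demonstration of that idiom (the token strings here are illustrative):

```rust
// `Vec::extend` accepts any `IntoIterator`, and `Option<T>` yields
// zero or one items, so extending by an `Option` appends at most once.
fn main() {
    let mut captured = vec!["Vec", "<", "u8"];

    // No unglued token pending: extend is a no-op.
    let pending: Option<&str> = None;
    captured.extend(pending);
    assert_eq!(captured, ["Vec", "<", "u8"]);

    // A pending `>` from ungluing `>>`: appended exactly once.
    captured.extend(Some(">"));
    assert_eq!(captured, ["Vec", "<", "u8", ">"]);
}
```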