@@ -57,17 +57,27 @@ impl Token {
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum TokenKind {
     /// A line comment, e.g. `// comment`.
-    LineComment { doc_style: Option<DocStyle> },
+    LineComment {
+        doc_style: Option<DocStyle>,
+    },
 
     /// A block comment, e.g. `/* block comment */`.
     ///
     /// Block comments can be recursive, so a sequence like `/* /* */`
     /// will not be considered terminated and will result in a parsing error.
-    BlockComment { doc_style: Option<DocStyle>, terminated: bool },
+    BlockComment {
+        doc_style: Option<DocStyle>,
+        terminated: bool,
+    },
 
     /// Any whitespace character sequence.
     Whitespace,
 
+    Frontmatter {
+        has_invalid_preceding_whitespace: bool,
+        invalid_infostring: bool,
+    },
+
     /// An identifier or keyword, e.g. `ident` or `continue`.
     Ident,
 
@@ -109,10 +119,15 @@ pub enum TokenKind {
     /// this type will need to check for and reject that case.
     ///
     /// See [LiteralKind] for more details.
-    Literal { kind: LiteralKind, suffix_start: u32 },
+    Literal {
+        kind: LiteralKind,
+        suffix_start: u32,
+    },
 
     /// A lifetime, e.g. `'a`.
-    Lifetime { starts_with_number: bool },
+    Lifetime {
+        starts_with_number: bool,
+    },
 
     /// `;`
     Semi,
@@ -280,7 +295,7 @@ pub fn strip_shebang(input: &str) -> Option<usize> {
 #[inline]
 pub fn validate_raw_str(input: &str, prefix_len: u32) -> Result<(), RawStrError> {
     debug_assert!(!input.is_empty());
-    let mut cursor = Cursor::new(input);
+    let mut cursor = Cursor::new(input, false);
     // Move past the leading `r` or `br`.
     for _ in 0..prefix_len {
         cursor.bump().unwrap();
@@ -290,7 +305,7 @@ pub fn validate_raw_str(input: &str, prefix_len: u32) -> Result<(), RawStrError>
 
 /// Creates an iterator that produces tokens from the input string.
 pub fn tokenize(input: &str) -> impl Iterator<Item = Token> {
-    let mut cursor = Cursor::new(input);
+    let mut cursor = Cursor::new(input, false);
     std::iter::from_fn(move || {
         let token = cursor.advance_token();
         if token.kind != TokenKind::Eof { Some(token) } else { None }
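
For context, a minimal sketch of driving the `tokenize` entry point shown above, assuming the crate is usable as a library under the name `rustc_lexer` and that `Token` exposes `kind` and `len` fields (implied by `Token::new` elsewhere, not shown in this hunk). Note that this path constructs the cursor with frontmatter disabled:

fn main() {
    // Print each token kind together with its length in bytes.
    for token in rustc_lexer::tokenize("let x = 42; // answer") {
        println!("{:?} ({} bytes)", token.kind, token.len);
    }
}
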
@@ -361,7 +376,30 @@ impl Cursor<'_> {
             Some(c) => c,
             None => return Token::new(TokenKind::Eof, 0),
         };
+
         let token_kind = match first_char {
+            c if self.frontmatter_allowed && is_whitespace(c) => {
+                let mut last = first_char;
+                while is_whitespace(self.first()) {
+                    let Some(c) = self.bump() else {
+                        break;
+                    };
+                    last = c;
+                }
+                // Invalid frontmatter opening, as the whitespace preceding it does not
+                // end with a newline. Combine the whitespace and the frontmatter into a
+                // single token, as we shall error later.
+                if last != '\n' && self.as_str().starts_with("---") {
+                    self.bump();
+                    self.frontmatter(true)
+                } else {
+                    Whitespace
+                }
+            }
+            '-' if self.frontmatter_allowed && self.as_str().starts_with("--") => {
+                // happy path
+                self.frontmatter(false)
+            }
             // Slash, comment or block comment.
             '/' => match self.first() {
                 '/' => self.line_comment(),
@@ -464,11 +502,82 @@ impl Cursor<'_> {
             c if !c.is_ascii() && c.is_emoji_char() => self.invalid_ident(),
             _ => Unknown,
         };
+        if self.frontmatter_allowed {
+            self.frontmatter_allowed = matches!(token_kind, Whitespace);
+        }
         let res = Token::new(token_kind, self.pos_within_token());
         self.reset_pos_within_token();
         res
     }
 
+    /// Given that one `-` was eaten, eat the rest of the frontmatter.
+    fn frontmatter(&mut self, has_invalid_preceding_whitespace: bool) -> TokenKind {
+        debug_assert_eq!('-', self.prev());
+
+        let pos = self.pos_within_token();
+        self.eat_while(|c| c == '-');
+
+        // One `-` was already eaten by the caller.
+        let length_opening = self.pos_within_token() - pos + 1;
+
+        // Must be ensured by the caller.
+        debug_assert!(length_opening >= 3);
+
+        self.eat_identifier();
+        self.eat_while(|ch| ch != '\n' && is_whitespace(ch));
+        let invalid_infostring = self.first() != '\n';
+
+        let mut s = self.as_str();
+        let mut found = false;
+        while let Some(closing) = s.find(&"-".repeat(length_opening as usize)) {
+            let preceding_chars_start = s[..closing].rfind("\n").map_or(0, |i| i + 1);
+            if s[preceding_chars_start..closing].chars().all(is_whitespace) {
+                // Candidate found.
+                self.bump_bytes(closing);
+                // In cases like
+                //     ---cargo
+                //     --- blahblah
+                // or
+                //     ---cargo
+                //     ----
+                // combine the rest of the closing line into this frontmatter token so
+                // that the malformed closing gets detected later.
+                self.eat_until(b'\n');
+                found = true;
+                break;
+            } else {
+                s = &s[closing + length_opening as usize..];
+            }
+        }
+
+        if !found {
+            // Recovery strategy: a potential closing might be preceded only by
+            // whitespace/newline but not have enough dashes to properly close. In this
+            // case, eat until there and report the mismatch in the parser.
+            let mut potential_closing = None;
+            let mut rest = self.as_str();
+            while let Some(closing) = rest.find("---") {
+                let preceding_chars_start = rest[..closing].rfind("\n").map_or(0, |i| i + 1);
+                if rest[preceding_chars_start..closing].chars().all(is_whitespace) {
+                    // Candidate found.
+                    potential_closing = Some(closing);
+                    break;
+                } else {
+                    rest = &rest[closing + 3..];
+                }
+            }
+            if let Some(potential_closing) = potential_closing {
+                // Bump to the potential closing, and eat everything on that line.
+                self.bump_bytes(potential_closing);
+                self.eat_until(b'\n');
+            } else {
+                // Eat everything; this will get reported as an unclosed frontmatter.
+                self.eat_while(|_| true);
+            }
+        }
+
+        Frontmatter { has_invalid_preceding_whitespace, invalid_infostring }
+    }
+
     fn line_comment(&mut self) -> TokenKind {
         debug_assert!(self.prev() == '/' && self.first() == '/');
         self.bump();
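
For context, a hedged sketch of exercising the new frontmatter path, assuming `Cursor`, `advance_token`, the `kind`/`len` fields of `Token`, and the boolean `frontmatter_allowed` parameter are all reachable as shown in the hunks above (the public surface may differ in the final API). With the input below, the first token produced should be `Frontmatter { has_invalid_preceding_whitespace: false, invalid_infostring: false }`, spanning everything from the opening `---cargo` through the closing `---`:

use rustc_lexer::{Cursor, TokenKind};

fn main() {
    // A cargo-script style input: a frontmatter block first, then ordinary Rust.
    let src = "---cargo\n[dependencies]\nclap = \"4\"\n---\n\nfn main() {}\n";
    // `true` enables frontmatter lexing, per the signature used in this diff.
    let mut cursor = Cursor::new(src, true);
    loop {
        let token = cursor.advance_token();
        if token.kind == TokenKind::Eof {
            break;
        }
        println!("{:?} ({} bytes)", token.kind, token.len);
    }
}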