
Commit 080ee6f

Auto merge of #80789 - Aaron1011:fix/stmt-empty, r=petrochenkov
Synthesize a `TokenStream` for `StmtKind::Empty`

Fixes #80760

2 parents fd34606 + 3ed6caa

File tree

3 files changed: +50 -1 lines changed


compiler/rustc_parse/src/lib.rs (+10 -1)

@@ -257,7 +257,16 @@ pub fn nt_to_tokenstream(
     let tokens = match *nt {
         Nonterminal::NtItem(ref item) => prepend_attrs(sess, &item.attrs, nt, item.tokens.as_ref()),
         Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
-        Nonterminal::NtStmt(ref stmt) => prepend_attrs(sess, stmt.attrs(), nt, stmt.tokens()),
+        Nonterminal::NtStmt(ref stmt) => {
+            let do_prepend = |tokens| prepend_attrs(sess, stmt.attrs(), nt, tokens);
+            if let ast::StmtKind::Empty = stmt.kind {
+                let tokens: TokenStream =
+                    tokenstream::TokenTree::token(token::Semi, stmt.span).into();
+                do_prepend(Some(&LazyTokenStream::new(tokens)))
+            } else {
+                do_prepend(stmt.tokens())
+            }
+        }
         Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
         Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()),
         Nonterminal::NtIdent(ident, is_raw) => {
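
Because an empty statement carries no captured tokens of its own, the new `NtStmt` arm builds a one-token stream containing just the statement's `;`. As a rough standalone analogue, here is a minimal sketch using the proc-macro2 crate (an assumption for illustration; the patch itself uses rustc's internal `tokenstream::TokenTree`, `token::Semi`, and `LazyTokenStream` types, and the helper name `synthesize_empty_stmt_tokens` is hypothetical):

// Sketch only; assumes a [dependencies] entry: proc-macro2 = "1"
use proc_macro2::{Punct, Spacing, TokenStream, TokenTree};

// Build a stream containing exactly one `;` token, analogous to what the
// compiler now synthesizes for a captured `StmtKind::Empty`.
fn synthesize_empty_stmt_tokens() -> TokenStream {
    TokenStream::from(TokenTree::Punct(Punct::new(';', Spacing::Alone)))
}

fn main() {
    assert_eq!(synthesize_empty_stmt_tokens().to_string(), ";");
}
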
issue-80760-empty-stmt.rs (new file, +26)

@@ -0,0 +1,26 @@
+// check-pass
+// aux-build:test-macros.rs
+// compile-flags: -Z span-debug
+
+#![no_std] // Don't load unnecessary hygiene information from std
+extern crate std;
+
+#[macro_use]
+extern crate test_macros;
+
+macro_rules! empty_stmt {
+    ($s:stmt) => {
+        print_bang!($s);
+
+        // Currently, all attributes are ignored
+        // on an empty statement
+        #[print_attr]
+        #[rustc_dummy(first)]
+        #[rustc_dummy(second)]
+        $s
+    }
+}
+
+fn main() {
+    empty_stmt!(;);
+}
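
The test relies on the `test-macros.rs` auxiliary crate for `print_bang!` and `#[print_attr]`. A hypothetical sketch of what a `print_bang!`-style macro could look like, assuming a separate crate compiled with `proc-macro = true` (the real auxiliary crate in the rustc test suite may be implemented differently):

// lib.rs of a hypothetical proc-macro crate; sketch only, the real
// `test-macros.rs` aux crate may differ.
use proc_macro::TokenStream;

#[proc_macro]
pub fn print_bang(input: TokenStream) -> TokenStream {
    // Echo the received tokens so an expected-stdout file can assert on them.
    println!("PRINT-BANG INPUT (DISPLAY): {}", input);
    println!("PRINT-BANG INPUT (DEBUG): {:#?}", input);
    input
}
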
issue-80760-empty-stmt.stdout (new file, +14)

@@ -0,0 +1,14 @@
+PRINT-BANG INPUT (DISPLAY): ;
+PRINT-BANG INPUT (DEBUG): TokenStream [
+    Group {
+        delimiter: None,
+        stream: TokenStream [
+            Punct {
+                ch: ';',
+                spacing: Alone,
+                span: $DIR/issue-80760-empty-stmt.rs:25:17: 25:18 (#0),
+            },
+        ],
+        span: $DIR/issue-80760-empty-stmt.rs:13:21: 13:23 (#4),
+    },
+]
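
The `Group { delimiter: None, .. }` wrapper in the expected output reflects the `$s:stmt` capture being passed to the proc macro as an invisibly delimited group around the synthesized `;`. A minimal sketch reconstructing that shape with the proc-macro2 crate (an assumption for illustration; spans and the exact Debug formatting will differ from the compiler's output above):

use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};

fn main() {
    // A lone `;` wrapped in a Delimiter::None group, mirroring the
    // synthesized tokens for the captured empty statement.
    let semi = TokenTree::Punct(Punct::new(';', Spacing::Alone));
    let group = Group::new(Delimiter::None, TokenStream::from(semi));
    let wrapped = TokenStream::from(TokenTree::Group(group));
    println!("{:#?}", wrapped);
}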
