Commit fae440c

bors[bot] and Veykril authored

Merge #10025: Don't mutate syntax trees when preparing proc-macro input r=Veykril a=Veykril

Fixes #10013

Co-authored-by: Lukas Wirth <[email protected]>
2 parents 9ea3c4d + 1195cb5 commit fae440c

File tree: 9 files changed, +100 -135 lines


Cargo.lock (+2)

Some generated files are not rendered by default.

crates/hir_expand/Cargo.toml (+1)

@@ -14,6 +14,7 @@ log = "0.4.8"
 either = "1.5.3"
 rustc-hash = "1.0.0"
 la-arena = { version = "0.2.0", path = "../../lib/arena" }
+itertools = "0.10.0"

 base_db = { path = "../base_db", version = "0.0.0" }
 cfg = { path = "../cfg", version = "0.0.0" }

crates/hir_expand/src/db.rs (+27 -8)

@@ -3,19 +3,20 @@
 use std::sync::Arc;

 use base_db::{salsa, SourceDatabase};
+use itertools::Itertools;
 use limit::Limit;
 use mbe::{ExpandError, ExpandResult};
 use parser::{FragmentKind, T};
 use syntax::{
     algo::diff,
-    ast::{self, NameOwner},
-    AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken,
+    ast::{self, AttrsOwner, NameOwner},
+    AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, TextRange,
 };

 use crate::{
-    ast_id_map::AstIdMap, hygiene::HygieneFrame, input::process_macro_input, BuiltinAttrExpander,
-    BuiltinDeriveExpander, BuiltinFnLikeExpander, HirFileId, HirFileIdRepr, MacroCallId,
-    MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
+    ast_id_map::AstIdMap, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander,
+    BuiltinFnLikeExpander, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc,
+    MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
 };

 /// Total limit on the number of tokens produced by any macro invocation.
@@ -257,9 +258,28 @@ fn parse_macro_expansion(

 fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
     let arg = db.macro_arg_text(id)?;
-    let (mut tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg));
+    let loc = db.lookup_intern_macro(id);
+
+    let node = SyntaxNode::new_root(arg);
+    let censor = match loc.kind {
+        MacroCallKind::FnLike { .. } => None,
+        MacroCallKind::Derive { derive_attr_index, .. } => match ast::Item::cast(node.clone()) {
+            Some(item) => item
+                .attrs()
+                .map(|attr| attr.syntax().text_range())
+                .take(derive_attr_index as usize + 1)
+                .fold1(TextRange::cover),
+            None => None,
+        },
+        MacroCallKind::Attr { invoc_attr_index, .. } => match ast::Item::cast(node.clone()) {
+            Some(item) => {
+                item.attrs().nth(invoc_attr_index as usize).map(|attr| attr.syntax().text_range())
+            }
+            None => None,
+        },
+    };
+    let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, censor);

-    let loc: MacroCallLoc = db.lookup_intern_macro(id);
     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
         tt.delimiter = None;
@@ -271,7 +291,6 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
 fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
     let loc = db.lookup_intern_macro(id);
     let arg = loc.kind.arg(db)?;
-    let arg = process_macro_input(&loc.kind, arg);
     if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
         let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
         let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
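The interesting piece above is how `macro_arg` now computes a censor range instead of rewriting the tree: for a derive it folds the text ranges of attributes 0 through `derive_attr_index` into one covering range with `Itertools::fold1` and `TextRange::cover`; for an attribute macro it censors just the invoking attribute. A minimal standalone sketch of the derive reduction, with a hypothetical `Range` type standing in for `syntax::TextRange` and made-up offsets (`reduce` plays the role of `fold1`):

// Sketch only: Range is a stand-in for syntax::TextRange; this is not
// rust-analyzer's actual API.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Range { start: u32, end: u32 }

impl Range {
    // Same contract as TextRange::cover: the smallest range containing both.
    fn cover(self, other: Range) -> Range {
        Range { start: self.start.min(other.start), end: self.end.max(other.end) }
    }
}

// Mirrors `.take(derive_attr_index as usize + 1).fold1(TextRange::cover)`.
fn derive_censor(attr_ranges: &[Range], derive_attr_index: usize) -> Option<Range> {
    attr_ranges.iter().copied().take(derive_attr_index + 1).reduce(Range::cover)
}

fn main() {
    let attrs = [
        Range { start: 0, end: 18 },  // #[derive(Derive0)]
        Range { start: 19, end: 37 }, // #[derive(Derive1)]
        Range { start: 38, end: 56 }, // #[derive(Derive2)]
    ];
    // Expanding the derive at index 1 censors attributes 0 and 1 as one range.
    assert_eq!(derive_censor(&attrs, 1), Some(Range { start: 0, end: 37 }));
}

Computing a censor range and skipping it during token conversion is what lets the original syntax tree stay untouched, which is the point of the fix for #10013.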

crates/hir_expand/src/input.rs (-120)

This file was deleted.

crates/hir_expand/src/lib.rs (-1)

@@ -14,7 +14,6 @@ pub mod builtin_macro;
 pub mod proc_macro;
 pub mod quote;
 pub mod eager;
-mod input;

 use base_db::ProcMacroKind;
 use either::Either;

crates/mbe/Cargo.toml (+1)

@@ -13,6 +13,7 @@ cov-mark = "2.0.0-pre.1"
 rustc-hash = "1.1.0"
 smallvec = "1.2.0"
 log = "0.4.8"
+expect-test = "1.1"

 syntax = { path = "../syntax", version = "0.0.0" }
 parser = { path = "../parser", version = "0.0.0" }

crates/mbe/src/lib.rs (+1 -1)

@@ -67,7 +67,7 @@ impl fmt::Display for ExpandError {
 pub use crate::{
     syntax_bridge::{
         parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
-        token_tree_to_syntax_node,
+        syntax_node_to_token_tree_censored, token_tree_to_syntax_node,
     },
     token_map::TokenMap,
 };

crates/mbe/src/syntax_bridge.rs (+27 -5)

@@ -1,5 +1,7 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].

+use std::iter;
+
 use parser::{FragmentKind, ParseError, TreeSink};
 use rustc_hash::FxHashMap;
 use syntax::{
@@ -16,8 +18,17 @@ use crate::{ExpandError, TokenMap};
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
+    syntax_node_to_token_tree_censored(node, None)
+}
+
+/// Convert the syntax node to a `TokenTree` (what macro will consume)
+/// with the censored range excluded.
+pub fn syntax_node_to_token_tree_censored(
+    node: &SyntaxNode,
+    censor: Option<TextRange>,
+) -> (tt::Subtree, TokenMap) {
     let global_offset = node.text_range().start();
-    let mut c = Convertor::new(node, global_offset);
+    let mut c = Convertor::new(node, global_offset, censor);
     let subtree = convert_tokens(&mut c);
     c.id_alloc.map.shrink_to_fit();
     (subtree, c.id_alloc.map)
@@ -446,16 +457,24 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
 struct Convertor {
     id_alloc: TokenIdAlloc,
     current: Option<SyntaxToken>,
+    censor: Option<TextRange>,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }

 impl Convertor {
-    fn new(node: &SyntaxNode, global_offset: TextSize) -> Convertor {
+    fn new(node: &SyntaxNode, global_offset: TextSize, censor: Option<TextRange>) -> Convertor {
+        let first = node.first_token();
+        let current = match censor {
+            Some(censor) => iter::successors(first, |token| token.next_token())
+                .find(|token| !censor.contains_range(token.text_range())),
+            None => first,
+        };
         Convertor {
             id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
-            current: node.first_token(),
+            current,
             range: node.text_range(),
+            censor,
             punct_offset: None,
         }
     }
@@ -512,8 +531,11 @@ impl TokenConvertor for Convertor {
         if !&self.range.contains_range(curr.text_range()) {
             return None;
         }
-        self.current = curr.next_token();
-
+        self.current = match self.censor {
+            Some(censor) => iter::successors(curr.next_token(), |token| token.next_token())
+                .find(|token| !censor.contains_range(token.text_range())),
+            None => curr.next_token(),
+        };
         let token = if curr.kind().is_punct() {
             let range = curr.text_range();
             let range = TextRange::at(range.start(), TextSize::of('.'));
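Note how both `Convertor::new` and the token-advance path share the same pattern: walk the token chain with `iter::successors` and `find` the first token whose range is not fully contained in the censor, so censored tokens are skipped during conversion rather than removed from the tree. A rough standalone model of that skip, with tokens as `(start, end)` spans in a slice instead of `SyntaxToken`s (all names here are illustrative, not rust-analyzer API):

use std::iter;

// Advance through the token chain and return the first token not fully
// inside the censored span, mirroring the iter::successors + find combo.
fn next_uncensored(
    tokens: &[(u32, u32)],
    from: Option<usize>,
    censor: Option<(u32, u32)>,
) -> Option<usize> {
    // Stand-in for SyntaxToken::next_token: step to the following index.
    let succ = |&i: &usize| (i + 1 < tokens.len()).then(|| i + 1);
    match censor {
        // Containment check matches TextRange::contains_range semantics.
        Some((cs, ce)) => iter::successors(from, succ)
            .find(|&i| !(cs <= tokens[i].0 && tokens[i].1 <= ce)),
        None => from,
    }
}

fn main() {
    // Four tokens; the middle two fall inside the censored span 5..15.
    let tokens = [(0, 4), (5, 9), (10, 14), (15, 20)];
    let censor = Some((5, 15));
    assert_eq!(next_uncensored(&tokens, Some(0), censor), Some(0)); // outside censor
    assert_eq!(next_uncensored(&tokens, Some(1), censor), Some(3)); // skips 1 and 2
}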

crates/mbe/src/tests.rs (+41)

@@ -228,3 +228,44 @@ fn debug_dump_ignore_spaces(node: &syntax::SyntaxNode) -> String {

     buf
 }
+
+#[test]
+fn test_node_to_tt_censor() {
+    use syntax::ast::{AttrsOwner, ModuleItemOwner};
+
+    let source = r##"
+#[attr0]
+#[attr1]
+#[attr2]
+struct Struct {
+    field: ()
+}
+"##;
+    let source_file = ast::SourceFile::parse(&source).ok().unwrap();
+    let item = source_file.items().next().unwrap();
+    let attr = item.attrs().nth(1).unwrap();
+
+    let (tt, _) =
+        syntax_node_to_token_tree_censored(item.syntax(), Some(attr.syntax().text_range()));
+    expect_test::expect![[r##"# [attr0] # [attr2] struct Struct {field : ()}"##]]
+        .assert_eq(&tt.to_string());
+
+    let source = r##"
+#[derive(Derive0)]
+#[derive(Derive1)]
+#[derive(Derive2)]
+struct Struct {
+    field: ()
+}
+"##;
+    let source_file = ast::SourceFile::parse(&source).ok().unwrap();
+    let item = source_file.items().next().unwrap();
+    let attr = item.attrs().nth(1).unwrap();
+
+    let (tt, _) = syntax_node_to_token_tree_censored(
+        item.syntax(),
+        Some(attr.syntax().text_range().cover_offset(0.into())),
+    );
+    expect_test::expect![[r##"# [derive (Derive2)] struct Struct {field : ()}"##]]
+        .assert_eq(&tt.to_string());
+}
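In the second test case the censor is `attr.syntax().text_range().cover_offset(0.into())`: the range of `#[derive(Derive1)]` widened to also include offset 0, so it swallows `#[derive(Derive0)]` as well and only `#[derive(Derive2)]` survives, matching what `macro_arg` builds for a derive at index 1. A sketch of the assumed `cover_offset` semantics on the same hypothetical `Range` stand-in as above:

// Assumed contract of TextRange::cover_offset: the smallest range containing
// both the original range and the given offset (hypothetical mirror type).
#[derive(Clone, Copy, Debug, PartialEq)]
struct Range { start: u32, end: u32 }

impl Range {
    fn cover_offset(self, offset: u32) -> Range {
        Range { start: self.start.min(offset), end: self.end.max(offset) }
    }
}

fn main() {
    // Say #[derive(Derive1)] spans 20..38; covering offset 0 pulls the censor
    // back to the start of the input, hiding the first two derive attributes.
    let attr1 = Range { start: 20, end: 38 };
    assert_eq!(attr1.cover_offset(0), Range { start: 0, end: 38 });
}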
