
Commit d6134b6

Don't mutate syntax trees when preparing proc-macro input
1 parent ce4670f commit d6134b6

File tree

7 files changed: +58 -136 lines

Cargo.lock
crates/hir_expand/Cargo.toml
crates/hir_expand/src/db.rs
crates/hir_expand/src/input.rs
crates/hir_expand/src/lib.rs
crates/mbe/src/lib.rs
crates/mbe/src/syntax_bridge.rs


Cargo.lock

+1

Generated file; diff not rendered by default.

crates/hir_expand/Cargo.toml

+1
@@ -14,6 +14,7 @@ log = "0.4.8"
 either = "1.5.3"
 rustc-hash = "1.0.0"
 la-arena = { version = "0.2.0", path = "../../lib/arena" }
+itertools = "0.10.0"
 
 base_db = { path = "../base_db", version = "0.0.0" }
 cfg = { path = "../cfg", version = "0.0.0" }

crates/hir_expand/src/db.rs

+27-8
@@ -3,19 +3,20 @@
 use std::sync::Arc;
 
 use base_db::{salsa, SourceDatabase};
+use itertools::Itertools;
 use limit::Limit;
 use mbe::{ExpandError, ExpandResult};
 use parser::{FragmentKind, T};
 use syntax::{
     algo::diff,
-    ast::{self, NameOwner},
-    AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken,
+    ast::{self, AttrsOwner, NameOwner},
+    AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, TextRange,
 };
 
 use crate::{
-    ast_id_map::AstIdMap, hygiene::HygieneFrame, input::process_macro_input, BuiltinAttrExpander,
-    BuiltinDeriveExpander, BuiltinFnLikeExpander, HirFileId, HirFileIdRepr, MacroCallId,
-    MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
+    ast_id_map::AstIdMap, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander,
+    BuiltinFnLikeExpander, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc,
+    MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
 };
 
 /// Total limit on the number of tokens produced by any macro invocation.
@@ -257,9 +258,28 @@ fn parse_macro_expansion(
 
 fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
     let arg = db.macro_arg_text(id)?;
-    let (mut tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg));
+    let loc = db.lookup_intern_macro(id);
+
+    let node = SyntaxNode::new_root(arg);
+    let censor = match loc.kind {
+        MacroCallKind::FnLike { .. } => None,
+        MacroCallKind::Derive { derive_attr_index, .. } => match ast::Item::cast(node.clone()) {
+            Some(item) => item
+                .attrs()
+                .map(|attr| attr.syntax().text_range())
+                .take(derive_attr_index as usize + 1)
+                .fold1(TextRange::cover),
+            None => None,
+        },
+        MacroCallKind::Attr { invoc_attr_index, .. } => match ast::Item::cast(node.clone()) {
+            Some(item) => {
+                item.attrs().nth(invoc_attr_index as usize).map(|attr| attr.syntax().text_range())
+            }
+            None => None,
+        },
+    };
+    let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, censor);
 
-    let loc: MacroCallLoc = db.lookup_intern_macro(id);
     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
         tt.delimiter = None;
@@ -271,7 +291,6 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
 fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
     let loc = db.lookup_intern_macro(id);
     let arg = loc.kind.arg(db)?;
-    let arg = process_macro_input(&loc.kind, arg);
    if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
         let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
         let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
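
The Derive arm of the censor computation above is what pulls in the new itertools dependency: fold1 collapses the text ranges of every attribute up to and including the invoking derive attribute into one covering range. A minimal standalone sketch of that fold, using plain (start, end) pairs in place of rust-analyzer's TextRange and std's Iterator::reduce in place of itertools' fold1 (the attribute ranges and index below are invented for illustration):

// Stand-in for `TextRange::cover`: the smallest range containing both inputs.
fn cover(a: (u32, u32), b: (u32, u32)) -> (u32, u32) {
    (a.0.min(b.0), a.1.max(b.1))
}

fn main() {
    // Invented ranges of three attributes on one item.
    let attr_ranges = [(0u32, 16u32), (17, 34), (35, 46)];
    let derive_attr_index: usize = 1; // index of the `#[derive]` being expanded

    // Mirror `.take(derive_attr_index + 1).fold1(TextRange::cover)`.
    let censor = attr_ranges
        .iter()
        .copied()
        .take(derive_attr_index + 1)
        .reduce(cover);

    assert_eq!(censor, Some((0, 34)));
}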

crates/hir_expand/src/input.rs

-120
This file was deleted.

crates/hir_expand/src/lib.rs

-1
@@ -14,7 +14,6 @@ pub mod builtin_macro;
 pub mod proc_macro;
 pub mod quote;
 pub mod eager;
-mod input;
 
 use base_db::ProcMacroKind;
 use either::Either;

crates/mbe/src/lib.rs

+1-1
@@ -67,7 +67,7 @@ impl fmt::Display for ExpandError {
 pub use crate::{
     syntax_bridge::{
         parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
-        token_tree_to_syntax_node,
+        syntax_node_to_token_tree_censored, token_tree_to_syntax_node,
     },
     token_map::TokenMap,
 };

crates/mbe/src/syntax_bridge.rs

+28-6
@@ -1,5 +1,7 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
 
+use std::iter;
+
 use parser::{FragmentKind, ParseError, TreeSink};
 use rustc_hash::FxHashMap;
 use syntax::{
@@ -13,11 +15,20 @@ use tt::buffer::{Cursor, TokenBuffer};
 use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
 use crate::{ExpandError, TokenMap};
 
-/// Convert the syntax node to a `TokenTree` (what macro
+/// Convert the syntax node to a `TokenTree` with the censored nodes excluded (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
+    syntax_node_to_token_tree_censored(node, None)
+}
+
+/// Convert the syntax node to a `TokenTree` with the censored nodes excluded (what macro
+/// will consume).
+pub fn syntax_node_to_token_tree_censored(
+    node: &SyntaxNode,
+    censor: Option<TextRange>,
+) -> (tt::Subtree, TokenMap) {
     let global_offset = node.text_range().start();
-    let mut c = Convertor::new(node, global_offset);
+    let mut c = Convertor::new(node, global_offset, censor);
     let subtree = convert_tokens(&mut c);
     c.id_alloc.map.shrink_to_fit();
     (subtree, c.id_alloc.map)
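
For callers, the new entry point composes with the existing parsing APIs. A sketch of how it might be driven (assumptions: the syntax crate's SourceFile::parse, ModuleItemOwner::items, and AttrsOwner::attrs as they exist around this commit; the helper name is hypothetical):

use syntax::{
    ast::{AttrsOwner, ModuleItemOwner},
    AstNode, SourceFile,
};

// Convert the first item of `text` to a token tree, censoring the range of
// its first attribute (as `macro_arg` does for attribute macro inputs).
fn first_item_censored_tt(text: &str) -> (tt::Subtree, mbe::TokenMap) {
    let file: SourceFile = SourceFile::parse(text).tree();
    let item = file.items().next().expect("input has no item");
    let censor = item.attrs().next().map(|attr| attr.syntax().text_range());
    mbe::syntax_node_to_token_tree_censored(item.syntax(), censor)
}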
@@ -446,16 +457,24 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
 struct Convertor {
     id_alloc: TokenIdAlloc,
     current: Option<SyntaxToken>,
+    censor: Option<TextRange>,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }
 
 impl Convertor {
-    fn new(node: &SyntaxNode, global_offset: TextSize) -> Convertor {
+    fn new(node: &SyntaxNode, global_offset: TextSize, censor: Option<TextRange>) -> Convertor {
+        let first = node.first_token();
+        let current = match censor {
+            Some(censor) => iter::successors(first, |token| token.next_token())
+                .find(|token| !censor.contains_range(token.text_range())),
+            None => first,
+        };
         Convertor {
             id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
-            current: node.first_token(),
+            current,
             range: node.text_range(),
+            censor,
             punct_offset: None,
         }
     }
@@ -512,8 +531,11 @@ impl TokenConvertor for Convertor {
         if !&self.range.contains_range(curr.text_range()) {
             return None;
         }
-        self.current = curr.next_token();
-
+        self.current = match self.censor {
+            Some(censor) => iter::successors(curr.next_token(), |token| token.next_token())
+                .find(|token| !censor.contains_range(token.text_range())),
+            None => curr.next_token(),
+        };
         let token = if curr.kind().is_punct() {
             let range = curr.text_range();
             let range = TextRange::at(range.start(), TextSize::of('.'));
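
Both Convertor::new and the bump path above skip censored tokens with the same iter::successors(...).find(...) walk, and the predicate is contains_range: a token is dropped only when its range lies entirely inside the censored range, so a token straddling the boundary survives. A standalone sketch of that predicate, assuming the text-size crate that rust-analyzer's TextRange comes from (offsets invented):

use text_size::{TextRange, TextSize};

fn main() {
    // Censor the leading `#[foo]` of `#[foo] fn bar() {}`.
    let censor = TextRange::new(TextSize::from(0), TextSize::from(6));
    let pound = TextRange::new(TextSize::from(0), TextSize::from(1)); // `#`
    let fn_kw = TextRange::new(TextSize::from(7), TextSize::from(9)); // `fn`

    assert!(censor.contains_range(pound)); // skipped by the walk
    assert!(!censor.contains_range(fn_kw)); // first surviving token
}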
