Commit 880baa9

Shuffle hir-expand things around
Parent: e320004

8 files changed: +295 -258 lines changed

crates/hir-expand/src/ast_id_map.rs (+8 -1)

@@ -155,7 +155,14 @@ impl PartialEq for AstIdMap {
 impl Eq for AstIdMap {}
 
 impl AstIdMap {
-    pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
+    pub(crate) fn ast_id_map(
+        db: &dyn ExpandDatabase,
+        file_id: span::HirFileId,
+    ) -> triomphe::Arc<AstIdMap> {
+        triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
+    }
+
+    fn from_source(node: &SyntaxNode) -> AstIdMap {
         assert!(node.parent().is_none());
         let mut res = AstIdMap::default();
crates/hir-expand/src/db.rs (+27 -248)
@@ -1,16 +1,14 @@
 //! Defines database & queries for macro expansion.
 
-use std::sync::OnceLock;
-
 use base_db::{
     salsa::{self, debug::DebugQueryTable},
-    CrateId, Edition, FileId, SourceDatabase, VersionReq,
+    CrateId, FileId, SourceDatabase,
 };
 use either::Either;
 use limit::Limit;
 use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
-use span::{Span, SyntaxContextId};
+use span::SyntaxContextId;
 use syntax::{
     ast::{self, HasAttrs},
     AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
@@ -19,13 +17,14 @@ use triomphe::Arc;
 
 use crate::{
     ast_id_map::AstIdMap,
-    attrs::{collect_attrs, RawAttrs},
+    attrs::collect_attrs,
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
+    declarative::DeclarativeMacroExpander,
     fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
     hygiene::{
-        apply_mark, span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
-        SyntaxContextData, Transparency,
+        span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
+        SyntaxContextData,
     },
     proc_macro::ProcMacros,
     span_map::{RealSpanMap, SpanMap, SpanMapRef},
@@ -43,82 +42,6 @@ use crate::{
 /// Actual max for `analysis-stats .` at some point: 30672.
 static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
 
-#[derive(Debug, Clone, Eq, PartialEq)]
-/// Old-style `macro_rules` or the new macros 2.0
-pub struct DeclarativeMacroExpander {
-    pub mac: mbe::DeclarativeMacro<span::Span>,
-    pub transparency: Transparency,
-}
-
-// FIXME: Remove this once we drop support for 1.76
-static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();
-
-impl DeclarativeMacroExpander {
-    pub fn expand(
-        &self,
-        db: &dyn ExpandDatabase,
-        tt: tt::Subtree,
-        call_id: MacroCallId,
-    ) -> ExpandResult<tt::Subtree> {
-        let loc = db.lookup_intern_macro_call(call_id);
-        let toolchain = &db.crate_graph()[loc.def.krate].toolchain;
-        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-                &base_db::Version {
-                    pre: base_db::Prerelease::EMPTY,
-                    build: base_db::BuildMetadata::EMPTY,
-                    major: version.major,
-                    minor: version.minor,
-                    patch: version.patch,
-                },
-            )
-        });
-        match self.mac.err() {
-            Some(e) => ExpandResult::new(
-                tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
-                ExpandError::other(format!("invalid macro definition: {e}")),
-            ),
-            None => self
-                .mac
-                .expand(
-                    &tt,
-                    |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
-                    new_meta_vars,
-                    loc.call_site,
-                )
-                .map_err(Into::into),
-        }
-    }
-
-    pub fn expand_unhygienic(
-        &self,
-        db: &dyn ExpandDatabase,
-        tt: tt::Subtree,
-        krate: CrateId,
-        call_site: Span,
-    ) -> ExpandResult<tt::Subtree> {
-        let toolchain = &db.crate_graph()[krate].toolchain;
-        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-                &base_db::Version {
-                    pre: base_db::Prerelease::EMPTY,
-                    build: base_db::BuildMetadata::EMPTY,
-                    major: version.major,
-                    minor: version.minor,
-                    patch: version.patch,
-                },
-            )
-        });
-        match self.mac.err() {
-            Some(e) => ExpandResult::new(
-                tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
-                ExpandError::other(format!("invalid macro definition: {e}")),
-            ),
-            None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
-        }
-    }
-}
-
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
     /// Old-style `macro_rules` or the new macros 2.0
@@ -141,6 +64,7 @@ pub trait ExpandDatabase: SourceDatabase {
     #[salsa::input]
     fn proc_macros(&self) -> Arc<ProcMacros>;
 
+    #[salsa::invoke(AstIdMap::ast_id_map)]
     fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
 
     /// Main public API -- parses a hir file, not caring whether it's a real
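
Note: the `#[salsa::invoke(AstIdMap::ast_id_map)]` attribute added here points the `ast_id_map` query at the associated function introduced in ast_id_map.rs above; the free function of the same name is deleted from db.rs further down in this diff. An abbreviated sketch of how the two ends line up after this commit, with both snippets copied from the diff and the surrounding trait and impl items elided:

    // db.rs: the query declaration now names its implementation explicitly.
    #[salsa::invoke(AstIdMap::ast_id_map)]
    fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;

    // ast_id_map.rs: the implementation lives next to the type it builds.
    impl AstIdMap {
        pub(crate) fn ast_id_map(
            db: &dyn ExpandDatabase,
            file_id: span::HirFileId,
        ) -> triomphe::Arc<AstIdMap> {
            triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
        }
    }
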
@@ -156,8 +80,10 @@ pub trait ExpandDatabase: SourceDatabase {
         macro_file: MacroFileId,
     ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
     #[salsa::transparent]
+    #[salsa::invoke(SpanMap::new)]
     fn span_map(&self, file_id: HirFileId) -> SpanMap;
 
+    #[salsa::invoke(crate::span_map::real_span_map)]
     fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
 
     /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
@@ -173,6 +99,7 @@ pub trait ExpandDatabase: SourceDatabase {
     #[salsa::transparent]
     fn setup_syntax_context_root(&self) -> ();
     #[salsa::transparent]
+    #[salsa::invoke(crate::hygiene::dump_syntax_contexts)]
     fn dump_syntax_contexts(&self) -> String;
 
     /// Lowers syntactic macro call to a token tree representation. That's a firewall
@@ -184,8 +111,10 @@ pub trait ExpandDatabase: SourceDatabase {
     ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
     /// Fetches the expander for this macro.
     #[salsa::transparent]
+    #[salsa::invoke(TokenExpander::macro_expander)]
     fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
     /// Fetches (and compiles) the expander of this decl macro.
+    #[salsa::invoke(DeclarativeMacroExpander::expander)]
     fn decl_macro_expander(
         &self,
         def_crate: CrateId,
@@ -203,36 +132,6 @@ pub trait ExpandDatabase: SourceDatabase {
     ) -> ExpandResult<Box<[SyntaxError]>>;
 }
 
-#[inline]
-pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
-    match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
-        HirFileIdRepr::MacroFile(m) => {
-            SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
-        }
-    }
-}
-
-pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
-    use syntax::ast::HasModuleItem;
-    let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
-    let ast_id_map = db.ast_id_map(file_id.into());
-    let tree = db.parse(file_id).tree();
-    // FIXME: Descend into modules and other item containing items that are not annotated with attributes
-    // and allocate pairs for those as well. This gives us finer grained span anchors resulting in
-    // better incrementality
-    pairs.extend(
-        tree.items()
-            .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
-    );
-
-    Arc::new(RealSpanMap::from_file(
-        file_id,
-        pairs.into_boxed_slice(),
-        tree.syntax().text_range().end(),
-    ))
-}
-
 /// This expands the given macro call, but with different arguments. This is
 /// used for completion, where we want to see what 'would happen' if we insert a
 /// token. The `token_to_map` mapped down into the expansion, with the mapped
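
Note: the `span_map` and `real_span_map` bodies removed in this hunk are not dropped from the crate; the `#[salsa::invoke(SpanMap::new)]` and `#[salsa::invoke(crate::span_map::real_span_map)]` attributes added above indicate they now live in the span_map module, which is one of the other changed files not shown in this excerpt. A minimal sketch of the assumed destination for the first of them, reusing the removed body verbatim:

    // crates/hir-expand/src/span_map.rs (assumed new home, not shown in this excerpt).
    impl SpanMap {
        pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
            match file_id.repr() {
                // Real files use the per-file map computed by the real_span_map query.
                HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
                // Macro files reuse the span map produced while parsing the expansion.
                HirFileIdRepr::MacroFile(m) => {
                    SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
                }
            }
        }
    }
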
@@ -357,10 +256,6 @@ pub fn expand_speculative(
     Some((node.syntax_node(), token))
 }
 
-fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
-    Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
-}
-
 fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
     match file_id.repr() {
         HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
@@ -412,7 +307,10 @@ fn parse_macro_expansion_error(
         .map(|it| it.0.errors().to_vec().into_boxed_slice())
 }
 
-fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
+pub(crate) fn parse_with_map(
+    db: &dyn ExpandDatabase,
+    file_id: HirFileId,
+) -> (Parse<SyntaxNode>, SpanMap) {
     match file_id.repr() {
         HirFileIdRepr::FileId(file_id) => {
             (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
@@ -581,100 +479,18 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
         .unwrap_or_default()
 }
 
-fn decl_macro_expander(
-    db: &dyn ExpandDatabase,
-    def_crate: CrateId,
-    id: AstId<ast::Macro>,
-) -> Arc<DeclarativeMacroExpander> {
-    let crate_data = &db.crate_graph()[def_crate];
-    let is_2021 = crate_data.edition >= Edition::Edition2021;
-    let (root, map) = parse_with_map(db, id.file_id);
-    let root = root.syntax_node();
-
-    let transparency = |node| {
-        // ... would be nice to have the item tree here
-        let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
-        match &*attrs
-            .iter()
-            .find(|it| {
-                it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
-            })?
-            .token_tree_value()?
-            .token_trees
-        {
-            [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
-                "transparent" => Some(Transparency::Transparent),
-                "semitransparent" => Some(Transparency::SemiTransparent),
-                "opaque" => Some(Transparency::Opaque),
-                _ => None,
-            },
-            _ => None,
-        }
-    };
-    let toolchain = crate_data.toolchain.as_ref();
-    let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-        REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-            &base_db::Version {
-                pre: base_db::Prerelease::EMPTY,
-                build: base_db::BuildMetadata::EMPTY,
-                major: version.major,
-                minor: version.minor,
-                patch: version.patch,
-            },
-        )
-    });
-
-    let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
-        ast::Macro::MacroRules(macro_rules) => (
-            match macro_rules.token_tree() {
-                Some(arg) => {
-                    let tt = mbe::syntax_node_to_token_tree(
-                        arg.syntax(),
-                        map.as_ref(),
-                        map.span_for_range(macro_rules.macro_rules_token().unwrap().text_range()),
-                    );
-
-                    mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars)
-                }
-                None => mbe::DeclarativeMacro::from_err(
-                    mbe::ParseError::Expected("expected a token tree".into()),
-                    is_2021,
-                ),
-            },
-            transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
-        ),
-        ast::Macro::MacroDef(macro_def) => (
-            match macro_def.body() {
-                Some(arg) => {
-                    let tt = mbe::syntax_node_to_token_tree(
-                        arg.syntax(),
-                        map.as_ref(),
-                        map.span_for_range(macro_def.macro_token().unwrap().text_range()),
-                    );
-
-                    mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars)
-                }
-                None => mbe::DeclarativeMacro::from_err(
-                    mbe::ParseError::Expected("expected a token tree".into()),
-                    is_2021,
-                ),
-            },
-            transparency(&macro_def).unwrap_or(Transparency::Opaque),
-        ),
-    };
-    Arc::new(DeclarativeMacroExpander { mac, transparency })
-}
-
-fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
-    match id.kind {
-        MacroDefKind::Declarative(ast_id) => {
-            TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
+impl TokenExpander {
+    fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
+        match id.kind {
+            MacroDefKind::Declarative(ast_id) => {
+                TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
+            }
+            MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
+            MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
+            MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
+            MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
+            MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
         }
-        MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
-        MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
-        MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
-        MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
-        MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
     }
 }
 
@@ -862,40 +678,3 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
 fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
     db.intern_syntax_context(SyntaxContextData::root());
 }
-
-fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
-    let mut s = String::from("Expansions:");
-    let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
-    entries.sort_by_key(|e| e.key);
-    for e in entries {
-        let id = e.key;
-        let expn_data = e.value.as_ref().unwrap();
-        s.push_str(&format!(
-            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
-            id,
-            expn_data.kind.file_id(),
-            expn_data.call_site,
-            SyntaxContextId::ROOT, // FIXME expn_data.def_site,
-            expn_data.kind.descr(),
-        ));
-    }
-
-    s.push_str("\n\nSyntaxContexts:\n");
-    let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
-    entries.sort_by_key(|e| e.key);
-    for e in entries {
-        struct SyntaxContextDebug<'a>(
-            &'a dyn ExpandDatabase,
-            SyntaxContextId,
-            &'a SyntaxContextData,
-        );
-
-        impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
-            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-                self.2.fancy_debug(self.1, self.0, f)
-            }
-        }
-        stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
-    }
-    s
-}
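
Note: the remaining deletions follow the same pattern of moving query implementations out of db.rs. The `DeclarativeMacroExpander` struct, its `expand`/`expand_unhygienic` methods, and the `decl_macro_expander` body are now reached through the new `crate::declarative::DeclarativeMacroExpander` import and `#[salsa::invoke(DeclarativeMacroExpander::expander)]`, while `dump_syntax_contexts` is reached through `#[salsa::invoke(crate::hygiene::dump_syntax_contexts)]`. The receiving files are among the eight changed in this commit but are not shown in this excerpt. A condensed sketch of the query wiring left behind in the `ExpandDatabase` trait, assembled from the attributes visible above (the tail of the `decl_macro_expander` signature is filled in from the removed free function):

    // Each query names its implementation via #[salsa::invoke], so db.rs no
    // longer hosts the bodies themselves.
    #[salsa::transparent]
    #[salsa::invoke(SpanMap::new)]
    fn span_map(&self, file_id: HirFileId) -> SpanMap;

    #[salsa::invoke(crate::span_map::real_span_map)]
    fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;

    #[salsa::transparent]
    #[salsa::invoke(crate::hygiene::dump_syntax_contexts)]
    fn dump_syntax_contexts(&self) -> String;

    #[salsa::transparent]
    #[salsa::invoke(TokenExpander::macro_expander)]
    fn macro_expander(&self, id: MacroDefId) -> TokenExpander;

    #[salsa::invoke(DeclarativeMacroExpander::expander)]
    fn decl_macro_expander(
        &self,
        def_crate: CrateId,
        id: AstId<ast::Macro>,
    ) -> Arc<DeclarativeMacroExpander>;
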
