Implement tokenization for some items in proc_macro #43230

Merged · 4 commits · Jul 28, 2017
79 changes: 71 additions & 8 deletions src/libproc_macro/lib.rs
@@ -509,14 +509,49 @@ impl TokenTree {
Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
Literal(..) | DocComment(..) => TokenNode::Literal(self::Literal(token)),

Interpolated(ref nt) => __internal::with_sess(|(sess, _)| {
TokenNode::Group(Delimiter::None, TokenStream(nt.1.force(|| {
// FIXME(jseyfried): Avoid this pretty-print + reparse hack
let name = "<macro expansion>".to_owned();
let source = pprust::token_to_string(&token);
parse_stream_from_source_str(name, source, sess, Some(span))
})))
}),
Interpolated(ref nt) => {
// An `Interpolated` token means that we have a `Nonterminal`
// which is often a parsed AST item. At this point we need to
// convert the parsed AST back into an actual token stream, in
// other words to un-parse it.
//
// Unfortunately there's not really a great way to do that in a
// guaranteed lossless fashion right now. The fallback here is
// to just stringify the AST node and reparse it, but this loses
// all span information.
//
// As a result, some AST nodes are annotated with the token
// stream they came from. Attempt to extract these lossless
// token streams before we fall back to the stringification.
let mut tokens = None;

match nt.0 {
Nonterminal::NtItem(ref item) => {
tokens = prepend_attrs(&item.attrs, item.tokens.as_ref(), span);
}
Nonterminal::NtTraitItem(ref item) => {
tokens = prepend_attrs(&item.attrs, item.tokens.as_ref(), span);
}
Nonterminal::NtImplItem(ref item) => {
tokens = prepend_attrs(&item.attrs, item.tokens.as_ref(), span);
}
_ => {}
}

tokens.map(|tokens| {
TokenNode::Group(Delimiter::None,
TokenStream(tokens.clone()))
}).unwrap_or_else(|| {
__internal::with_sess(|(sess, _)| {
TokenNode::Group(Delimiter::None, TokenStream(nt.1.force(|| {
// FIXME(jseyfried): Avoid this pretty-print + reparse hack
let name = "<macro expansion>".to_owned();
let source = pprust::token_to_string(&token);
parse_stream_from_source_str(name, source, sess, Some(span))
})))
})
})
}

OpenDelim(..) | CloseDelim(..) => unreachable!(),
Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
@@ -580,6 +615,34 @@ impl TokenTree {
}
}

fn prepend_attrs(attrs: &[ast::Attribute],
tokens: Option<&tokenstream::TokenStream>,
span: syntax_pos::Span)
-> Option<tokenstream::TokenStream>
{
let tokens = match tokens {
Some(tokens) => tokens,
None => return None,
};
if attrs.len() == 0 {
return Some(tokens.clone())
}
let mut builder = tokenstream::TokenStreamBuilder::new();
for attr in attrs {
assert_eq!(attr.style, ast::AttrStyle::Outer,
"inner attributes should prevent cached tokens from existing");
let stream = __internal::with_sess(|(sess, _)| {
// FIXME: Avoid this pretty-print + reparse hack as above
let name = "<macro expansion>".to_owned();
let source = pprust::attr_to_string(attr);
parse_stream_from_source_str(name, source, sess, Some(span))
});
builder.push(stream);
}
builder.push(tokens.clone());
Some(builder.build())
}

/// Permanently unstable internal implementation details of this crate. This
/// should not be used.
///
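The `Interpolated` arm and `prepend_attrs` above boil down to one decision: prefer the token stream the parser cached on the AST node (lossless, spans intact), and only fall back to pretty-printing and reparsing when no cached stream exists. A condensed sketch of that decision, assuming the same imports and helpers as `lib.rs` above (`item_to_tokens` itself is a hypothetical name, not part of this PR):

// Hypothetical condensation of the logic in the `Interpolated` arm; `item`
// stands for the node carried by NtItem / NtTraitItem / NtImplItem.
fn item_to_tokens(item: &ast::Item,
                  token: &token::Token,
                  span: syntax_pos::Span) -> tokenstream::TokenStream {
    // Lossless path: the parser cached the original tokens, and
    // `prepend_attrs` re-attaches the outer attributes in front of them.
    if let Some(tokens) = prepend_attrs(&item.attrs, item.tokens.as_ref(), span) {
        return tokens;
    }
    // Lossy fallback: stringify the AST node and reparse the string; every
    // span in the result collapses to the macro-expansion span.
    __internal::with_sess(|(sess, _)| {
        let name = "<macro expansion>".to_owned();
        let source = pprust::token_to_string(token);
        parse_stream_from_source_str(name, source, sess, Some(span))
    })
}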
1 change: 1 addition & 0 deletions src/librustc_metadata/cstore_impl.rs
@@ -389,6 +389,7 @@ impl CrateStore for cstore::CStore {
legacy: def.legacy,
}),
vis: ast::Visibility::Inherited,
tokens: None,
})
}

13 changes: 13 additions & 0 deletions src/libsyntax/ast.rs
@@ -1149,6 +1149,8 @@ pub struct TraitItem {
pub attrs: Vec<Attribute>,
pub node: TraitItemKind,
pub span: Span,
/// See `Item::tokens` for what this is
pub tokens: Option<TokenStream>,
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@@ -1168,6 +1170,8 @@ pub struct ImplItem {
pub attrs: Vec<Attribute>,
pub node: ImplItemKind,
pub span: Span,
/// See `Item::tokens` for what this is
pub tokens: Option<TokenStream>,
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@@ -1812,6 +1816,15 @@ pub struct Item {
pub node: ItemKind,
pub vis: Visibility,
pub span: Span,

/// Original tokens this item was parsed from. This isn't necessarily
/// available for all items, although over time more and more items should
/// have this be `Some`. Right now this is primarily used for procedural
/// macros, notably custom attributes.
///
/// Note that the tokens here do not include the outer attributes, but will
/// include inner attributes.
pub tokens: Option<TokenStream>,
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
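A concrete (hypothetical) illustration of the outer/inner attribute note on `Item::tokens` above: for the item below, the cached `tokens` would start at `mod` and include the inner `#![allow(dead_code)]`, but not the outer `#[cfg(test)]`; that is exactly why `prepend_attrs` in `libproc_macro` glues the outer attributes back on before handing the stream to a procedural macro.

#[cfg(test)]              // outer attribute: not part of `Item::tokens`
mod tests {               // `tokens` covers from `mod` ...
    #![allow(dead_code)]  // inner attribute: included in `tokens`
    fn helper() {}
}                         // ... through the closing brace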
1 change: 1 addition & 0 deletions src/libsyntax/diagnostics/plugin.rs
@@ -236,6 +236,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
),
vis: ast::Visibility::Public,
span: span,
tokens: None,
})
]))
}
6 changes: 4 additions & 2 deletions src/libsyntax/ext/build.rs
@@ -979,7 +979,8 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
id: ast::DUMMY_NODE_ID,
node: node,
vis: ast::Visibility::Inherited,
span: span
span: span,
tokens: None,
})
}

@@ -1147,7 +1148,8 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
attrs: vec![],
node: ast::ItemKind::Use(vp),
vis: vis,
span: sp
span: sp,
tokens: None,
})
}

1 change: 1 addition & 0 deletions src/libsyntax/ext/expand.rs
@@ -214,6 +214,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
ident: keywords::Invalid.ident(),
id: ast::DUMMY_NODE_ID,
vis: ast::Visibility::Public,
tokens: None,
})));

match self.expand(krate_item).make_items().pop().map(P::unwrap) {
3 changes: 3 additions & 0 deletions src/libsyntax/ext/placeholders.rs
@@ -46,15 +46,18 @@ pub fn placeholder(kind: ExpansionKind, id: ast::NodeId) -> Expansion {
ExpansionKind::Items => Expansion::Items(SmallVector::one(P(ast::Item {
id: id, span: span, ident: ident, vis: vis, attrs: attrs,
node: ast::ItemKind::Mac(mac_placeholder()),
tokens: None,
}))),
ExpansionKind::TraitItems => Expansion::TraitItems(SmallVector::one(ast::TraitItem {
id: id, span: span, ident: ident, attrs: attrs,
node: ast::TraitItemKind::Macro(mac_placeholder()),
tokens: None,
})),
ExpansionKind::ImplItems => Expansion::ImplItems(SmallVector::one(ast::ImplItem {
id: id, span: span, ident: ident, vis: vis, attrs: attrs,
node: ast::ImplItemKind::Macro(mac_placeholder()),
defaultness: ast::Defaultness::Final,
tokens: None,
})),
ExpansionKind::Pat => Expansion::Pat(P(ast::Pat {
id: id, span: span, node: ast::PatKind::Mac(mac_placeholder()),
15 changes: 11 additions & 4 deletions src/libsyntax/fold.rs
@@ -957,7 +957,8 @@ pub fn noop_fold_trait_item<T: Folder>(i: TraitItem, folder: &mut T)
TraitItemKind::Macro(folder.fold_mac(mac))
}
},
span: folder.new_span(i.span)
span: folder.new_span(i.span),
tokens: i.tokens,
})
}

@@ -980,7 +981,8 @@ pub fn noop_fold_impl_item<T: Folder>(i: ImplItem, folder: &mut T)
ast::ImplItemKind::Type(ty) => ast::ImplItemKind::Type(folder.fold_ty(ty)),
ast::ImplItemKind::Macro(mac) => ast::ImplItemKind::Macro(folder.fold_mac(mac))
},
span: folder.new_span(i.span)
span: folder.new_span(i.span),
tokens: i.tokens,
})
}

@@ -1000,6 +1002,7 @@ pub fn noop_fold_crate<T: Folder>(Crate {module, attrs, span}: Crate,
vis: ast::Visibility::Public,
span: span,
node: ast::ItemKind::Mod(module),
tokens: None,
})).into_iter();

let (module, attrs, span) = match items.next() {
@@ -1032,15 +1035,19 @@ pub fn noop_fold_item<T: Folder>(i: P<Item>, folder: &mut T) -> SmallVector<P<It
}

// fold one item into exactly one item
pub fn noop_fold_item_simple<T: Folder>(Item {id, ident, attrs, node, vis, span}: Item,
pub fn noop_fold_item_simple<T: Folder>(Item {id, ident, attrs, node, vis, span, tokens}: Item,
folder: &mut T) -> Item {
Item {
id: folder.new_id(id),
vis: folder.fold_vis(vis),
ident: folder.fold_ident(ident),
attrs: fold_attrs(attrs, folder),
node: folder.fold_item_kind(node),
span: folder.new_span(span)
span: folder.new_span(span),

// FIXME: if this is replaced with a call to `folder.fold_tts` it causes
// an ICE during resolve... odd!
tokens: tokens,
}
}

9 changes: 8 additions & 1 deletion src/libsyntax/parse/mod.rs
@@ -843,11 +843,18 @@ mod tests {
// check the contents of the tt manually:
#[test] fn parse_fundecl () {
// this test depends on the intern order of "fn" and "i32"
assert_eq!(string_to_item("fn a (b : i32) { b; }".to_string()),
let item = string_to_item("fn a (b : i32) { b; }".to_string()).map(|m| {
m.map(|mut m| {
m.tokens = None;
m
})
});
assert_eq!(item,
Some(
P(ast::Item{ident:Ident::from_str("a"),
attrs:Vec::new(),
id: ast::DUMMY_NODE_ID,
tokens: None,
node: ast::ItemKind::Fn(P(ast::FnDecl {
inputs: vec![ast::Arg{
ty: P(ast::Ty{id: ast::DUMMY_NODE_ID,
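The test above clears `tokens` before the comparison because `string_to_item` now captures a real token stream, while the expected `ast::Item` literal is written with `tokens: None`; without the reset the structural `assert_eq!` would always fail. A small sketch of that normalization as a reusable helper (`strip_tokens` is a hypothetical name, not part of this PR):

// Hypothetical test helper: drop the captured token stream so it does not
// participate in structural equality checks.
fn strip_tokens(item: P<ast::Item>) -> P<ast::Item> {
    item.map(|mut item| {
        item.tokens = None;
        item
    })
}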