2169: MBE: Mapping spans for goto definition r=matklad a=edwin0cheng

Currently, go to definition gives the wrong span in MBE. This PR implements a mapping mechanism to fix it, and it could be used for a future MBE hygiene implementation.

The basic idea of the mapping is:
1. When expanding the macro, generate 2 `TokenMap`s which map the macro args and macro defs between tokens and input text-ranges.
2. Before converting the generated `TokenTree` to a `SyntaxNode`, generate an `ExpandedRangeMap`, which is a mapping between tokens and output text-ranges.
3. Use these 3 mappings to construct an `ExpansionInfo`, which can map between input text-ranges and output text-ranges.


Co-authored-by: Edwin Cheng <edwin0cheng@gmail.com>
This commit is contained in:
bors[bot] 2019-11-09 09:13:14 +00:00 committed by GitHub
commit 561bb979ce
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
8 changed files with 305 additions and 101 deletions

View file

@ -22,9 +22,12 @@ pub trait AstDatabase: SourceDatabase {
#[salsa::interned]
fn intern_macro(&self, macro_call: MacroCallLoc) -> MacroCallId;
fn macro_arg(&self, id: MacroCallId) -> Option<Arc<tt::Subtree>>;
fn macro_def(&self, id: MacroDefId) -> Option<Arc<mbe::MacroRules>>;
fn parse_macro(&self, macro_file: MacroFile) -> Option<Parse<SyntaxNode>>;
fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
fn macro_def(&self, id: MacroDefId) -> Option<Arc<(mbe::MacroRules, mbe::TokenMap)>>;
fn parse_macro(
&self,
macro_file: MacroFile,
) -> Option<(Parse<SyntaxNode>, Arc<mbe::RevTokenMap>)>;
fn macro_expand(&self, macro_call: MacroCallId) -> Result<Arc<tt::Subtree>, String>;
}
@ -34,10 +37,13 @@ pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdM
Arc::new(map)
}
pub(crate) fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> {
pub(crate) fn macro_def(
db: &dyn AstDatabase,
id: MacroDefId,
) -> Option<Arc<(mbe::MacroRules, mbe::TokenMap)>> {
let macro_call = id.ast_id.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| {
let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| {
log::warn!("fail on macro_def to token tree: {:#?}", arg);
None
})?;
@ -45,15 +51,18 @@ pub(crate) fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<Macr
log::warn!("fail on macro_def parse: {:#?}", tt);
None
})?;
Some(Arc::new(rules))
Some(Arc::new((rules, tmap)))
}
pub(crate) fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<tt::Subtree>> {
pub(crate) fn macro_arg(
db: &dyn AstDatabase,
id: MacroCallId,
) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
let loc = db.lookup_intern_macro(id);
let macro_call = loc.ast_id.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(&arg)?;
Some(Arc::new(tt))
let (tt, tmap) = mbe::ast_to_token_tree(&arg)?;
Some(Arc::new((tt, tmap)))
}
pub(crate) fn macro_expand(
@ -64,7 +73,7 @@ pub(crate) fn macro_expand(
let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?;
let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
let tt = macro_rules.expand(&macro_arg).map_err(|err| format!("{:?}", err))?;
let tt = macro_rules.0.expand(&macro_arg.0).map_err(|err| format!("{:?}", err))?;
// Set a hard limit for the expanded tt
let count = tt.count();
if count > 65536 {
@ -77,7 +86,7 @@ pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Optio
match file_id.0 {
HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro(macro_file).map(|it| it.syntax_node())
db.parse_macro(macro_file).map(|(it, _)| it.syntax_node())
}
}
}
@ -85,8 +94,9 @@ pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Optio
pub(crate) fn parse_macro(
db: &dyn AstDatabase,
macro_file: MacroFile,
) -> Option<Parse<SyntaxNode>> {
) -> Option<(Parse<SyntaxNode>, Arc<mbe::RevTokenMap>)> {
let _p = profile("parse_macro_query");
let macro_call_id = macro_file.macro_call_id;
let tt = db
.macro_expand(macro_call_id)
@ -97,8 +107,13 @@ pub(crate) fn parse_macro(
log::warn!("fail on macro_parse: (reason: {})", err,);
})
.ok()?;
match macro_file.macro_file_kind {
MacroFileKind::Items => mbe::token_tree_to_items(&tt).ok().map(Parse::to_syntax),
MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax),
MacroFileKind::Items => {
mbe::token_tree_to_items(&tt).ok().map(|(p, map)| (p.to_syntax(), Arc::new(map)))
}
MacroFileKind::Expr => {
mbe::token_tree_to_expr(&tt).ok().map(|(p, map)| (p.to_syntax(), Arc::new(map)))
}
}
}

View file

@ -12,11 +12,12 @@ pub mod hygiene;
pub mod diagnostics;
use std::hash::{Hash, Hasher};
use std::sync::Arc;
use ra_db::{salsa, CrateId, FileId};
use ra_syntax::{
ast::{self, AstNode},
SyntaxNode,
SyntaxNode, TextRange, TextUnit,
};
use crate::ast_id_map::FileAstId;
@ -66,6 +67,30 @@ impl HirFileId {
}
}
}
/// Return expansion information if it is a macro-expansion file
pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option<ExpansionInfo> {
match self.0 {
HirFileIdRepr::FileId(_) => None,
HirFileIdRepr::MacroFile(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id);
let arg_start = loc.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
let def_start =
loc.def.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
let macro_def = db.macro_def(loc.def)?;
let shift = macro_def.0.shift();
let exp_map = db.parse_macro(macro_file)?.1;
let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
let arg_start = (loc.ast_id.file_id, arg_start);
let def_start = (loc.def.ast_id.file_id, def_start);
Some(ExpansionInfo { arg_start, def_start, macro_arg, macro_def, exp_map, shift })
}
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -112,6 +137,36 @@ impl MacroCallId {
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
pub struct ExpansionInfo {
pub(crate) arg_start: (HirFileId, TextUnit),
pub(crate) def_start: (HirFileId, TextUnit),
pub(crate) shift: u32,
pub(crate) macro_def: Arc<(mbe::MacroRules, mbe::TokenMap)>,
pub(crate) macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
pub(crate) exp_map: Arc<mbe::RevTokenMap>,
}
impl ExpansionInfo {
pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
fn look_in_rev_map(exp_map: &mbe::RevTokenMap, from: TextRange) -> Option<tt::TokenId> {
exp_map.ranges.iter().find(|&it| it.0.is_subrange(&from)).map(|it| it.1)
}
let token_id = look_in_rev_map(&self.exp_map, from)?;
let (token_map, file_offset, token_id) = if token_id.0 >= self.shift {
(&self.macro_arg.1, self.arg_start, tt::TokenId(token_id.0 - self.shift).into())
} else {
(&self.macro_def.1, self.def_start, token_id)
};
let range = token_map.relative_range_of(token_id)?;
Some((file_offset.0, TextRange::offset_len(range.start() + file_offset.1, range.len())))
}
}
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.

View file

@ -29,6 +29,21 @@ pub struct NavigationTarget {
docs: Option<String>,
}
fn find_range_from_node(
db: &RootDatabase,
src: hir::HirFileId,
node: &SyntaxNode,
) -> (FileId, TextRange) {
let text_range = node.text_range();
let (file_id, text_range) = src
.expansion_info(db)
.and_then(|expansion_info| expansion_info.find_range(text_range))
.unwrap_or((src, text_range));
// FIXME: handle recursive macro generated macro
(file_id.original_file(db), text_range)
}
impl NavigationTarget {
/// When `focus_range` is specified, returns it. otherwise
/// returns `full_range`
@ -72,8 +87,12 @@ impl NavigationTarget {
self.focus_range
}
pub(crate) fn from_bind_pat(file_id: FileId, pat: &ast::BindPat) -> NavigationTarget {
NavigationTarget::from_named(file_id, pat, None, None)
pub(crate) fn from_bind_pat(
db: &RootDatabase,
file_id: FileId,
pat: &ast::BindPat,
) -> NavigationTarget {
NavigationTarget::from_named(db, file_id.into(), pat, None, None)
}
pub(crate) fn from_symbol(db: &RootDatabase, symbol: FileSymbol) -> NavigationTarget {
@ -96,7 +115,7 @@ impl NavigationTarget {
) -> NavigationTarget {
let parse = db.parse(file_id);
let pat = pat.to_node(parse.tree().syntax());
NavigationTarget::from_bind_pat(file_id, &pat)
NavigationTarget::from_bind_pat(db, file_id, &pat)
}
pub(crate) fn from_self_param(
@ -119,31 +138,46 @@ impl NavigationTarget {
pub(crate) fn from_module(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
let src = module.definition_source(db);
let file_id = src.file_id.original_file(db);
let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default();
match src.ast {
ModuleSource::SourceFile(node) => {
NavigationTarget::from_syntax(file_id, name, None, node.syntax(), None, None)
let (file_id, text_range) = find_range_from_node(db, src.file_id, node.syntax());
NavigationTarget::from_syntax(
file_id,
name,
None,
text_range,
node.syntax(),
None,
None,
)
}
ModuleSource::Module(node) => {
let (file_id, text_range) = find_range_from_node(db, src.file_id, node.syntax());
NavigationTarget::from_syntax(
file_id,
name,
None,
text_range,
node.syntax(),
node.doc_comment_text(),
node.short_label(),
)
}
ModuleSource::Module(node) => NavigationTarget::from_syntax(
file_id,
name,
None,
node.syntax(),
node.doc_comment_text(),
node.short_label(),
),
}
}
pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default();
if let Some(src) = module.declaration_source(db) {
let file_id = src.file_id.original_file(db);
let (file_id, text_range) = find_range_from_node(db, src.file_id, src.ast.syntax());
return NavigationTarget::from_syntax(
file_id,
name,
None,
text_range,
src.ast.syntax(),
src.ast.doc_comment_text(),
src.ast.short_label(),
@ -154,13 +188,25 @@ impl NavigationTarget {
pub(crate) fn from_field(db: &RootDatabase, field: hir::StructField) -> NavigationTarget {
let src = field.source(db);
let file_id = src.file_id.original_file(db);
match src.ast {
FieldSource::Named(it) => {
NavigationTarget::from_named(file_id, &it, it.doc_comment_text(), it.short_label())
}
FieldSource::Named(it) => NavigationTarget::from_named(
db,
src.file_id,
&it,
it.doc_comment_text(),
it.short_label(),
),
FieldSource::Pos(it) => {
NavigationTarget::from_syntax(file_id, "".into(), None, it.syntax(), None, None)
let (file_id, text_range) = find_range_from_node(db, src.file_id, it.syntax());
NavigationTarget::from_syntax(
file_id,
"".into(),
None,
text_range,
it.syntax(),
None,
None,
)
}
}
}
@ -172,7 +218,8 @@ impl NavigationTarget {
{
let src = def.source(db);
NavigationTarget::from_named(
src.file_id.original_file(db),
db,
src.file_id,
&src.ast,
src.ast.doc_comment_text(),
src.ast.short_label(),
@ -212,10 +259,13 @@ impl NavigationTarget {
impl_block: hir::ImplBlock,
) -> NavigationTarget {
let src = impl_block.source(db);
let (file_id, text_range) = find_range_from_node(db, src.file_id, src.ast.syntax());
NavigationTarget::from_syntax(
src.file_id.original_file(db),
file_id,
"impl".into(),
None,
text_range,
src.ast.syntax(),
None,
None,
@ -236,12 +286,7 @@ impl NavigationTarget {
pub(crate) fn from_macro_def(db: &RootDatabase, macro_call: hir::MacroDef) -> NavigationTarget {
let src = macro_call.source(db);
log::debug!("nav target {:#?}", src.ast.syntax());
NavigationTarget::from_named(
src.file_id.original_file(db),
&src.ast,
src.ast.doc_comment_text(),
None,
)
NavigationTarget::from_named(db, src.file_id, &src.ast, src.ast.doc_comment_text(), None)
}
#[cfg(test)]
@ -270,21 +315,33 @@ impl NavigationTarget {
/// Allows `NavigationTarget` to be created from a `NameOwner`
pub(crate) fn from_named(
file_id: FileId,
db: &RootDatabase,
file_id: hir::HirFileId,
node: &impl ast::NameOwner,
docs: Option<String>,
description: Option<String>,
) -> NavigationTarget {
//FIXME: use `_` instead of empty string
let name = node.name().map(|it| it.text().clone()).unwrap_or_default();
let focus_range = node.name().map(|it| it.syntax().text_range());
NavigationTarget::from_syntax(file_id, name, focus_range, node.syntax(), docs, description)
let focus_range = node.name().map(|it| find_range_from_node(db, file_id, it.syntax()).1);
let (file_id, full_range) = find_range_from_node(db, file_id, node.syntax());
NavigationTarget::from_syntax(
file_id,
name,
focus_range,
full_range,
node.syntax(),
docs,
description,
)
}
fn from_syntax(
file_id: FileId,
name: SmolStr,
focus_range: Option<TextRange>,
full_range: TextRange,
node: &SyntaxNode,
docs: Option<String>,
description: Option<String>,
@ -293,9 +350,8 @@ impl NavigationTarget {
file_id,
name,
kind: node.kind(),
full_range: node.text_range(),
full_range,
focus_range,
// ptr: Some(LocalSyntaxPtr::new(node)),
container_name: None,
description,
docs,

View file

@ -101,19 +101,20 @@ pub(crate) fn name_definition(
}
}
if let Some(nav) = named_target(file_id, &parent) {
if let Some(nav) = named_target(db, file_id, &parent) {
return Some(vec![nav]);
}
None
}
fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> {
fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> {
match_ast! {
match node {
ast::StructDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -121,7 +122,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::EnumDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -129,7 +131,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::EnumVariant(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -137,7 +140,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::FnDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -145,7 +149,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::TypeAliasDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -153,7 +158,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::ConstDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -161,7 +167,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::StaticDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -169,7 +176,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::TraitDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -177,7 +185,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::RecordFieldDef(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -185,7 +194,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::Module(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
it.short_label(),
@ -193,7 +203,8 @@ fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget>
},
ast::MacroCall(it) => {
Some(NavigationTarget::from_named(
file_id,
db,
file_id.into(),
&it,
it.doc_comment_text(),
None,
@ -334,6 +345,46 @@ mod tests {
);
}
#[test]
fn goto_definition_works_for_macro_defined_fn_with_arg() {
check_goto(
"
//- /lib.rs
macro_rules! define_fn {
($name:ident) => (fn $name() {})
}
define_fn!(
foo
)
fn bar() {
<|>foo();
}
",
"foo FN_DEF FileId(1) [80; 83) [80; 83)",
);
}
#[test]
fn goto_definition_works_for_macro_defined_fn_no_arg() {
check_goto(
"
//- /lib.rs
macro_rules! define_fn {
() => (fn foo() {})
}
define_fn!();
fn bar() {
<|>foo();
}
",
"foo FN_DEF FileId(1) [39; 42) [39; 42)",
);
}
#[test]
fn goto_definition_works_for_methods() {
covers!(goto_definition_works_for_methods);

View file

@ -94,10 +94,10 @@ impl FromIterator<TableEntry<FileId, Parse<ast::SourceFile>>> for SyntaxTreeStat
}
}
impl FromIterator<TableEntry<MacroFile, Option<Parse<SyntaxNode>>>> for SyntaxTreeStats {
impl<M> FromIterator<TableEntry<MacroFile, Option<(Parse<SyntaxNode>, M)>>> for SyntaxTreeStats {
fn from_iter<T>(iter: T) -> SyntaxTreeStats
where
T: IntoIterator<Item = TableEntry<MacroFile, Option<Parse<SyntaxNode>>>>,
T: IntoIterator<Item = TableEntry<MacroFile, Option<(Parse<SyntaxNode>, M)>>>,
{
let mut res = SyntaxTreeStats::default();
for entry in iter {

View file

@ -32,7 +32,7 @@ pub enum ExpandError {
pub use crate::syntax_bridge::{
ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_expr, token_tree_to_items,
token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty, RevTokenMap, TokenMap,
};
/// This struct contains AST for a single `macro_rules` definition. What might
@ -118,6 +118,10 @@ impl MacroRules {
shift_subtree(&mut tt, self.shift);
mbe_expander::expand(self, &tt)
}
pub fn shift(&self) -> u32 {
self.shift
}
}
impl Rule {

View file

@ -14,12 +14,18 @@ use crate::subtree_source::SubtreeTokenSource;
use crate::ExpandError;
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Default)]
#[derive(Debug, PartialEq, Eq, Default)]
pub struct TokenMap {
/// Maps `tt::TokenId` to the *relative* source range.
tokens: Vec<TextRange>,
}
/// Maps relative range of the expanded syntax node to `tt::TokenId`
#[derive(Debug, PartialEq, Eq, Default)]
pub struct RevTokenMap {
pub ranges: Vec<(TextRange, tt::TokenId)>,
}
/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
/// will consume).
pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
@ -52,7 +58,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
fn fragment_to_syntax_node(
tt: &tt::Subtree,
fragment_kind: FragmentKind,
) -> Result<Parse<SyntaxNode>, ExpandError> {
) -> Result<(Parse<SyntaxNode>, RevTokenMap), ExpandError> {
let tmp;
let tokens = match tt {
tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(),
@ -69,38 +75,33 @@ fn fragment_to_syntax_node(
return Err(ExpandError::ConversionError);
}
//FIXME: would be cool to report errors
let parse = tree_sink.inner.finish();
Ok(parse)
let (parse, range_map) = tree_sink.finish();
Ok((parse, range_map))
}
/// Parses the token tree (result of macro expansion) to an expression
pub fn token_tree_to_expr(tt: &tt::Subtree) -> Result<Parse<ast::Expr>, ExpandError> {
let parse = fragment_to_syntax_node(tt, Expr)?;
parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
macro_rules! impl_token_tree_conversions {
($($(#[$attr:meta])* $name:ident => ($kind:ident, $t:ty) ),*) => {
$(
$(#[$attr])*
pub fn $name(tt: &tt::Subtree) -> Result<(Parse<$t>, RevTokenMap), ExpandError> {
let (parse, map) = fragment_to_syntax_node(tt, $kind)?;
parse.cast().ok_or_else(|| crate::ExpandError::ConversionError).map(|p| (p, map))
}
)*
}
}
/// Parses the token tree (result of macro expansion) to a Pattern
pub fn token_tree_to_pat(tt: &tt::Subtree) -> Result<Parse<ast::Pat>, ExpandError> {
let parse = fragment_to_syntax_node(tt, Pattern)?;
parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) to a Type
pub fn token_tree_to_ty(tt: &tt::Subtree) -> Result<Parse<ast::TypeRef>, ExpandError> {
let parse = fragment_to_syntax_node(tt, Type)?;
parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) as a sequence of stmts
pub fn token_tree_to_macro_stmts(tt: &tt::Subtree) -> Result<Parse<ast::MacroStmts>, ExpandError> {
let parse = fragment_to_syntax_node(tt, Statements)?;
parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) as a sequence of items
pub fn token_tree_to_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
let parse = fragment_to_syntax_node(tt, Items)?;
parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
impl_token_tree_conversions! {
/// Parses the token tree (result of macro expansion) to an expression
token_tree_to_expr => (Expr, ast::Expr),
/// Parses the token tree (result of macro expansion) to a Pattern
token_tree_to_pat => (Pattern, ast::Pat),
/// Parses the token tree (result of macro expansion) to a Type
token_tree_to_ty => (Type, ast::TypeRef),
/// Parses the token tree (result of macro expansion) as a sequence of stmts
token_tree_to_macro_stmts => (Statements, ast::MacroStmts),
/// Parses the token tree (result of macro expansion) as a sequence of items
token_tree_to_items => (Items, ast::MacroItems)
}
impl TokenMap {
@ -116,6 +117,12 @@ impl TokenMap {
}
}
impl RevTokenMap {
fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
self.ranges.push((relative_range, token_id.clone()))
}
}
/// Returns the textual content of a doc comment block as a quoted string
/// That is, strips leading `///` (or `/**`, etc)
/// and strips the ending `*/`
@ -262,6 +269,7 @@ struct TtTreeSink<'a> {
cursor: Cursor<'a>,
text_pos: TextUnit,
inner: SyntaxTreeBuilder,
range_map: RevTokenMap,
// Number of roots
// Use for detect ill-form tree which is not single root
@ -276,8 +284,13 @@ impl<'a> TtTreeSink<'a> {
text_pos: 0.into(),
inner: SyntaxTreeBuilder::default(),
roots: smallvec::SmallVec::new(),
range_map: RevTokenMap::default(),
}
}
fn finish(self) -> (Parse<SyntaxNode>, RevTokenMap) {
(self.inner.finish(), self.range_map)
}
}
fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
@ -307,6 +320,15 @@ impl<'a> TreeSink for TtTreeSink<'a> {
match self.cursor.token_tree() {
Some(tt::TokenTree::Leaf(leaf)) => {
// Mark the range if needed
if let tt::Leaf::Ident(ident) = leaf {
if kind == IDENT {
let range =
TextRange::offset_len(self.text_pos, TextUnit::of_str(&ident.text));
self.range_map.add(range, ident.id);
}
}
self.cursor = self.cursor.bump();
self.buf += &format!("{}", leaf);
}
@ -337,6 +359,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
{
if curr.spacing == tt::Spacing::Alone {
self.inner.token(WHITESPACE, " ".into());
self.text_pos += TextUnit::of_char(' ');
}
}
}

View file

@ -126,7 +126,7 @@ fn test_expr_order() {
"#,
);
let expanded = expand(&rules, "foo! { 1 + 1}");
let tree = token_tree_to_items(&expanded).unwrap().tree();
let tree = token_tree_to_items(&expanded).unwrap().0.tree();
let dump = format!("{:#?}", tree.syntax());
assert_eq_text!(
@ -383,7 +383,7 @@ fn test_expand_to_item_list() {
",
);
let expansion = expand(&rules, "structs!(Foo, Bar);");
let tree = token_tree_to_items(&expansion).unwrap().tree();
let tree = token_tree_to_items(&expansion).unwrap().0.tree();
assert_eq!(
format!("{:#?}", tree.syntax()).trim(),
r#"
@ -501,7 +501,7 @@ fn test_tt_to_stmts() {
);
let expanded = expand(&rules, "foo!{}");
let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree();
let stmts = token_tree_to_macro_stmts(&expanded).unwrap().0.tree();
assert_eq!(
format!("{:#?}", stmts.syntax()).trim(),
@ -946,7 +946,7 @@ fn test_vec() {
);
let expansion = expand(&rules, r#"vec![1u32,2];"#);
let tree = token_tree_to_expr(&expansion).unwrap().tree();
let tree = token_tree_to_expr(&expansion).unwrap().0.tree();
assert_eq!(
format!("{:#?}", tree.syntax()).trim(),
@ -1436,8 +1436,8 @@ pub(crate) fn assert_expansion(
};
let (expanded_tree, expected_tree) = match kind {
MacroKind::Items => {
let expanded_tree = token_tree_to_items(&expanded).unwrap().tree();
let expected_tree = token_tree_to_items(&expected).unwrap().tree();
let expanded_tree = token_tree_to_items(&expanded).unwrap().0.tree();
let expected_tree = token_tree_to_items(&expected).unwrap().0.tree();
(
debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
@ -1446,8 +1446,8 @@ pub(crate) fn assert_expansion(
}
MacroKind::Stmts => {
let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree();
let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree();
let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().0.tree();
let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().0.tree();
(
debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),