2289: More correct expansion mapping r=matklad a=matklad



Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2019-11-17 17:17:21 +00:00 committed by GitHub
commit 076921c5c7
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 74 additions and 44 deletions

View file

@@ -16,7 +16,7 @@ use ra_syntax::{
ast::{self, AstNode},
match_ast, AstPtr,
SyntaxKind::*,
SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
};
use crate::{
@@ -131,11 +131,16 @@ pub struct Expansion {
}
impl Expansion {
pub fn translate_offset(&self, db: &impl HirDatabase, offset: TextUnit) -> Option<TextUnit> {
pub fn map_token_down(
&self,
db: &impl HirDatabase,
token: Source<&SyntaxToken>,
) -> Option<Source<SyntaxToken>> {
let exp_info = self.file_id().expansion_info(db)?;
exp_info.translate_offset(offset)
exp_info.map_token_down(token)
}
pub fn file_id(&self) -> HirFileId {
fn file_id(&self) -> HirFileId {
self.macro_call_id.as_file(MacroFileKind::Items)
}
}

View file

@@ -18,8 +18,9 @@ use std::sync::Arc;
use ra_db::{salsa, CrateId, FileId};
use ra_syntax::{
algo,
ast::{self, AstNode},
SyntaxNode, TextRange, TextUnit,
SyntaxNode, SyntaxToken, TextRange, TextUnit,
};
use crate::ast_id_map::FileAstId;
@@ -83,13 +84,21 @@ impl HirFileId {
loc.def.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
let macro_def = db.macro_def(loc.def)?;
let exp_map = db.parse_macro(macro_file)?.1;
let (parse, exp_map) = db.parse_macro(macro_file)?;
let expanded = Source::new(self, parse.syntax_node());
let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
let arg_start = (loc.ast_id.file_id, arg_start);
let def_start = (loc.def.ast_id.file_id, def_start);
Some(ExpansionInfo { arg_start, def_start, macro_arg, macro_def, exp_map })
Some(ExpansionInfo {
expanded,
arg_start,
def_start,
macro_arg,
macro_def,
exp_map,
})
}
}
}
@@ -146,27 +155,34 @@ impl MacroCallId {
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExpansionInfo {
pub(crate) arg_start: (HirFileId, TextUnit),
pub(crate) def_start: (HirFileId, TextUnit),
expanded: Source<SyntaxNode>,
arg_start: (HirFileId, TextUnit),
def_start: (HirFileId, TextUnit),
pub(crate) macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
pub(crate) macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
pub(crate) exp_map: Arc<mbe::RevTokenMap>,
macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
exp_map: Arc<mbe::RevTokenMap>,
}
impl ExpansionInfo {
pub fn translate_offset(&self, offset: TextUnit) -> Option<TextUnit> {
let offset = offset.checked_sub(self.arg_start.1)?;
let token_id = self.macro_arg.1.token_by_offset(offset)?;
pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
assert_eq!(token.file_id, self.arg_start.0);
let range = token.ast.text_range().checked_sub(self.arg_start.1)?;
let token_id = self.macro_arg.1.token_by_range(range)?;
let token_id = self.macro_def.0.map_id_down(token_id);
let (r, _) = self.exp_map.ranges.iter().find(|(_, tid)| *tid == token_id)?;
Some(r.start())
let range = self.exp_map.range_by_token(token_id)?;
let token = algo::find_covering_element(&self.expanded.ast, range).into_token()?;
Some(self.expanded.with_ast(token))
}
// FIXME: a more correct signature would be
// `pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>>`
pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
let token_id = look_in_rev_map(&self.exp_map, from)?;

View file

@@ -4,9 +4,8 @@ use std::iter::successors;
use hir::{db::AstDatabase, Source};
use ra_syntax::{
algo::find_node_at_offset,
ast::{self, DocCommentsOwner},
match_ast, AstNode, SyntaxNode, TextUnit,
match_ast, AstNode, SyntaxNode, SyntaxToken,
};
use crate::{
@@ -20,37 +19,42 @@ pub(crate) fn goto_definition(
db: &RootDatabase,
position: FilePosition,
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
let offset = descend_into_macros(db, position);
let token = descend_into_macros(db, position)?;
let syntax = db.parse_or_expand(offset.file_id)?;
let res = match_ast! {
match (token.ast.parent()) {
ast::NameRef(name_ref) => {
let navs = reference_definition(db, token.with_ast(&name_ref)).to_vec();
RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec())
},
ast::Name(name) => {
let navs = name_definition(db, token.with_ast(&name))?;
RangeInfo::new(name.syntax().text_range(), navs)
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&syntax, offset.ast) {
let navs = reference_definition(db, offset.with_ast(&name_ref)).to_vec();
return Some(RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec()));
}
if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, offset.ast) {
let navs = name_definition(db, offset.with_ast(&name))?;
return Some(RangeInfo::new(name.syntax().text_range(), navs));
}
None
},
_ => return None,
}
};
Some(res)
}
fn descend_into_macros(db: &RootDatabase, position: FilePosition) -> Source<TextUnit> {
successors(Some(Source::new(position.file_id.into(), position.offset)), |offset| {
let syntax = db.parse_or_expand(offset.file_id)?;
let macro_call = find_node_at_offset::<ast::MacroCall>(&syntax, offset.ast)?;
fn descend_into_macros(db: &RootDatabase, position: FilePosition) -> Option<Source<SyntaxToken>> {
let file = db.parse_or_expand(position.file_id.into())?;
let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?;
successors(Some(Source::new(position.file_id.into(), token)), |token| {
let macro_call = token.ast.ancestors().find_map(ast::MacroCall::cast)?;
let tt = macro_call.token_tree()?;
if !tt.syntax().text_range().contains(offset.ast) {
if !token.ast.text_range().is_subrange(&tt.syntax().text_range()) {
return None;
}
let source_analyzer =
hir::SourceAnalyzer::new(db, offset.with_ast(macro_call.syntax()), None);
hir::SourceAnalyzer::new(db, token.with_ast(token.ast.parent()).as_ref(), None);
let exp = source_analyzer.expand(db, &macro_call)?;
let next_offset = exp.translate_offset(db, offset.ast)?;
Some(Source::new(exp.file_id(), next_offset))
exp.map_token_down(db, token.as_ref())
})
.last()
.unwrap()
}
#[derive(Debug)]

View file

@@ -77,14 +77,14 @@ pub fn token_tree_to_syntax_node(
}
impl TokenMap {
pub fn token_by_offset(&self, relative_offset: TextUnit) -> Option<tt::TokenId> {
pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
let (idx, _) =
self.tokens.iter().enumerate().find(|(_, range)| range.contains(relative_offset))?;
self.tokens.iter().enumerate().find(|(_, range)| **range == relative_range)?;
Some(tt::TokenId(idx as u32))
}
pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
let idx = tt.0 as usize;
pub fn relative_range_of(&self, token_id: tt::TokenId) -> Option<TextRange> {
let idx = token_id.0 as usize;
self.tokens.get(idx).copied()
}
@@ -96,6 +96,11 @@ impl TokenMap {
}
impl RevTokenMap {
pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
Some(r)
}
fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
self.ranges.push((relative_range, token_id.clone()))
}