Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-25 20:43:21 +00:00
Return all ranges corresponding to a token id in TokenMap
commit c5059e0623 (parent 7e31c5ec0d)
6 changed files with 99 additions and 71 deletions
@@ -2,7 +2,7 @@
 mod source_to_def;
 
-use std::{cell::RefCell, fmt, iter::successors};
+use std::{cell::RefCell, fmt};
 
 use base_db::{FileId, FileRange};
 use hir_def::{
@@ -14,6 +14,7 @@ use hir_expand::{name::AsName, ExpansionInfo};
 use hir_ty::{associated_type_shorthand_candidates, Interner};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
 use syntax::{
     algo::find_node_at_offset,
     ast::{self, GenericParamsOwner, LoopBodyOwner},
@@ -166,6 +167,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     }
 
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+        self.imp.descend_into_macros(token).pop().unwrap()
+    }
+
+    pub fn descend_into_macros_many(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         self.imp.descend_into_macros(token)
     }
 
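How the two public entry points relate after this change, as a minimal self-contained sketch: Token, Semantics, and descend_impl here are illustrative stand-ins rather than the real hir API, and a plain Vec stands in for SmallVec<[SyntaxToken; 1]>. The old single-token method keeps its signature by popping the last element of the full result set (relying on the set never being empty), while descend_into_macros_many exposes every mapped token.

// Stand-ins, not rust-analyzer's real types.
#[derive(Debug, Clone, PartialEq)]
struct Token(String);

struct Semantics;

impl Semantics {
    // Stand-in for SemanticsImpl::descend_into_macros: one input token can
    // map to several tokens when a macro uses its input more than once.
    fn descend_impl(&self, token: Token) -> Vec<Token> {
        vec![Token(format!("{}#1", token.0)), Token(format!("{}#2", token.0))]
    }

    // Old-style API: one descended token, taken from the full set.
    fn descend_into_macros(&self, token: Token) -> Token {
        self.descend_impl(token).pop().unwrap()
    }

    // New API: all descended tokens.
    fn descend_into_macros_many(&self, token: Token) -> Vec<Token> {
        self.descend_impl(token)
    }
}

fn main() {
    let sema = Semantics;
    let one = sema.descend_into_macros(Token("x".into()));
    let all = sema.descend_into_macros_many(Token("x".into()));
    // The single-token variant is just the last element of the many-variant.
    assert_eq!(Some(&one), all.last());
    println!("one = {:?}, all = {:?}", one, all);
}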
@@ -440,17 +445,19 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
 
-    fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+    fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
             Some(it) => it,
-            None => return token,
+            None => return smallvec![token],
         };
         let sa = self.analyze(&parent);
-        let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
+        let mut queue = vec![InFile::new(sa.file_id, token)];
+        let mut res = smallvec![];
+        while let Some(token) = queue.pop() {
             self.db.unwind_if_cancelled();
 
+            let mapped = (|| {
                 for node in token.value.ancestors() {
                     match_ast! {
                         match node {
@@ -468,48 +475,53 @@ impl<'db> SemanticsImpl<'db> {
                                     return None;
                                 }
                                 let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
-                                let token = self
-                                    .expansion_info_cache
-                                    .borrow_mut()
+                                let mut cache = self.expansion_info_cache.borrow_mut();
+                                let tokens = cache
                                     .entry(file_id)
                                     .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
                                     .as_ref()?
                                     .map_token_down(self.db.upcast(), None, token.as_ref())?;
 
+                                queue.extend(tokens.inspect(|token| {
                                     if let Some(parent) = token.value.parent() {
                                         self.cache(find_root(&parent), token.file_id);
                                     }
+                                }));
-                                return Some(token);
+                                return Some(());
                             },
                             ast::Item(item) => {
-                                if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
+                                match self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item))) {
+                                    Some(call_id) => {
                                         let file_id = call_id.as_file();
-                                        let token = self
-                                            .expansion_info_cache
-                                            .borrow_mut()
+                                        let mut cache = self.expansion_info_cache.borrow_mut();
+                                        let tokens = cache
                                             .entry(file_id)
                                             .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
                                             .as_ref()?
-                                            .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
+                                            .map_token_down(self.db.upcast(), None, token.as_ref())?;
 
+                                        queue.extend(tokens.inspect(|token| {
                                             if let Some(parent) = token.value.parent() {
                                                 self.cache(find_root(&parent), token.file_id);
                                             }
+                                        }));
-                                        return Some(token);
+                                        return Some(());
+                                    }
+                                    None => {}
                                 }
                             },
                             _ => {}
                         }
                     }
                 }
 
                 None
-            })
-            .last()
-            .unwrap();
-        token.value
+            })();
+            match mapped {
+                Some(()) => (),
+                None => res.push(token.value),
+            }
+        }
+        res
     }
 
     fn descend_node_at_offset(
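The core of the rewrite above: iter::successors follows a single chain, one token per step, so at each macro level all but one of the mapped tokens was dropped. The explicit queue keeps every mapping alive: tokens that map down into an expansion go back onto the worklist, and tokens that map nowhere are collected as final results. A self-contained sketch of that control flow, where map_down stands in for the map_token_down call and u32 stands in for a token (all names illustrative):

// Pretend tokens below 4 each expand into two tokens one level deeper.
fn map_down(token: u32) -> Option<Vec<u32>> {
    if token < 4 {
        Some(vec![token * 10, token * 10 + 1])
    } else {
        None
    }
}

fn descend_all(start: u32) -> Vec<u32> {
    let mut queue = vec![start];
    let mut res = Vec::new();
    while let Some(token) = queue.pop() {
        match map_down(token) {
            // Mapped: keep descending into every produced token.
            Some(next) => queue.extend(next),
            // Unmapped: this token is a final result.
            None => res.push(token),
        }
    }
    res
}

fn main() {
    // 1 -> {10, 11}; neither maps further, so both are returned,
    // where a successors-style chain would have kept only one.
    assert_eq!(descend_all(1), vec![11, 10]);
}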
@@ -519,8 +531,8 @@ impl<'db> SemanticsImpl<'db> {
     ) -> impl Iterator<Item = SyntaxNode> + '_ {
         // Handle macro token cases
         node.token_at_offset(offset)
-            .map(|token| self.descend_into_macros(token))
-            .map(|it| self.token_ancestors_with_macros(it))
+            .flat_map(move |token| self.descend_into_macros(token))
+            .map(move |it| self.token_ancestors_with_macros(it))
             .flatten()
     }
 
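Because descend_into_macros now yields a collection per token, the .map in this chain becomes .flat_map, which splices each per-token result into one flat stream (SmallVec implements IntoIterator, so it is a valid flat_map source). A tiny sketch with Vec<String> standing in for the token collections (names illustrative):

// One offset can touch up to two tokens, and each token may descend into
// several macro-expanded tokens; flat_map flattens the per-token results.
fn descend(token: &str) -> Vec<String> {
    vec![format!("{}.a", token), format!("{}.b", token)]
}

fn main() {
    let tokens_at_offset = ["ident", "ws"];
    let all: Vec<String> = tokens_at_offset.iter().flat_map(|t| descend(t)).collect();
    assert_eq!(all, ["ident.a", "ident.b", "ws.a", "ws.b"]);
}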
@@ -163,7 +163,7 @@ pub fn expand_speculative(
         mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;
 
     let token_id = macro_def.map_id_down(token_id);
-    let range = tmap_2.range_by_token(token_id, token_to_map.kind())?;
+    let range = tmap_2.first_range_by_token(token_id, token_to_map.kind())?;
    let token = node.syntax_node().covering_element(range).into_token()?;
     Some((node.syntax_node(), token))
 }
 
|
@ -171,7 +171,7 @@ impl HygieneInfo {
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?;
|
let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
|
||||||
Some((tt.with_value(range + tt.value), origin))
|
Some((tt.with_value(range + tt.value), origin))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -368,7 +368,7 @@ impl ExpansionInfo {
         db: &dyn db::AstDatabase,
         item: Option<ast::Item>,
         token: InFile<&SyntaxToken>,
-    ) -> Option<InFile<SyntaxToken>> {
+    ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
         assert_eq!(token.file_id, self.arg.file_id);
         let token_id = if let Some(item) = item {
             let call_id = match self.expanded.file_id.0 {
@@ -411,11 +411,12 @@ impl ExpansionInfo {
             }
         };
 
-        let range = self.exp_map.range_by_token(token_id, token.value.kind())?;
+        let tokens = self
+            .exp_map
+            .ranges_by_token(token_id, token.value.kind())
+            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
 
-        let token = self.expanded.value.covering_element(range).into_token()?;
-
-        Some(self.expanded.with_value(token))
+        Some(tokens.map(move |token| self.expanded.with_value(token)))
     }
 
     pub fn map_token_up(
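map_token_down's return type changes shape from Option<InFile<SyntaxToken>> to Option<impl Iterator<Item = InFile<SyntaxToken>> + '_>: None still means "this token maps nowhere", while a hit can now carry several tokens. A sketch of what that shape change looks like from the caller's side, with a plain HashMap as a stand-in source of values (function names illustrative):

use std::collections::HashMap;

// Old shape: at most one value per key.
fn first_value(map: &HashMap<u32, Vec<u32>>, key: u32) -> Option<u32> {
    map.get(&key)?.first().copied()
}

// New shape: all values for the key, still None when the key is absent.
fn all_values(map: &HashMap<u32, Vec<u32>>, key: u32) -> Option<impl Iterator<Item = u32> + '_> {
    Some(map.get(&key)?.iter().copied())
}

fn main() {
    let mut map = HashMap::new();
    map.insert(7u32, vec![1, 2, 3]);

    assert_eq!(first_value(&map, 7), Some(1));

    // New callers iterate everything...
    let all: Vec<u32> = all_values(&map, 7).into_iter().flatten().collect();
    assert_eq!(all, vec![1, 2, 3]);

    // ...or recover the old behaviour with `.next()`, the same pattern
    // first_range_by_token wraps for ranges_by_token below.
    assert_eq!(all_values(&map, 8).and_then(|mut it| it.next()), None);
}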
@@ -453,7 +454,7 @@ impl ExpansionInfo {
             },
         };
 
-        let range = token_map.range_by_token(token_id, token.value.kind())?;
+        let range = token_map.first_range_by_token(token_id, token.value.kind())?;
         let token =
             tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
         Some((tt.with_value(token), origin))
 
@@ -58,8 +58,9 @@ macro_rules! foobar {
     let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
     let content = node.syntax_node().to_string();
 
-    let get_text =
-        |id, kind| -> String { content[token_map.range_by_token(id, kind).unwrap()].to_string() };
+    let get_text = |id, kind| -> String {
+        content[token_map.first_range_by_token(id, kind).unwrap()].to_string()
+    };
 
     assert_eq!(expanded.token_trees.len(), 4);
     // {($e:ident) => { fn $e() {} }}
 
@@ -46,9 +46,23 @@ impl TokenMap {
         Some(token_id)
     }
 
-    pub fn range_by_token(&self, token_id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> {
-        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
-        range.by_kind(kind)
+    pub fn ranges_by_token(
+        &self,
+        token_id: tt::TokenId,
+        kind: SyntaxKind,
+    ) -> impl Iterator<Item = TextRange> + '_ {
+        self.entries
+            .iter()
+            .filter(move |&&(tid, _)| tid == token_id)
+            .filter_map(move |(_, range)| range.by_kind(kind))
+    }
+
+    pub fn first_range_by_token(
+        &self,
+        token_id: tt::TokenId,
+        kind: SyntaxKind,
+    ) -> Option<TextRange> {
+        self.ranges_by_token(token_id, kind).next()
     }
 
     pub(crate) fn shrink_to_fit(&mut self) {
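Finally, the TokenMap change itself in isolation: find returned only the first entry recorded for a token id, while filter yields all of them, which is what matters when an expansion emits the same input token more than once. A self-contained sketch with simplified types (u32 ids and (u32, u32) ranges standing in for tt::TokenId and TextRange; the real methods additionally narrow each entry by SyntaxKind via by_kind):

type TokenId = u32;
type TextRange = (u32, u32);

struct TokenMap {
    entries: Vec<(TokenId, TextRange)>,
}

impl TokenMap {
    // All ranges recorded for a token id; a token that a macro duplicates
    // appears once per occurrence in the expansion.
    fn ranges_by_token(&self, token_id: TokenId) -> impl Iterator<Item = TextRange> + '_ {
        self.entries.iter().filter(move |&&(tid, _)| tid == token_id).map(|&(_, range)| range)
    }

    // The old single-range behaviour, recovered as "first hit"; this is what
    // hygiene and speculative-expansion call sites switched to.
    fn first_range_by_token(&self, token_id: TokenId) -> Option<TextRange> {
        self.ranges_by_token(token_id).next()
    }
}

fn main() {
    // Token 1 was emitted twice by the expansion.
    let map = TokenMap { entries: vec![(0, (0, 2)), (1, (3, 5)), (1, (8, 10))] };
    assert_eq!(map.ranges_by_token(1).count(), 2);
    assert_eq!(map.first_range_by_token(1), Some((3, 5)));
    assert_eq!(map.first_range_by_token(9), None);
}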