From c5059e06231a76be32e46f1b9aa06fb67b77db4c Mon Sep 17 00:00:00 2001
From: Lukas Wirth
Date: Sat, 28 Aug 2021 21:18:56 +0200
Subject: [PATCH] Return all ranges corresponding to a token id in TokenMap

---
 crates/hir/src/semantics.rs      | 128 +++++++++++++++++--------------
 crates/hir_expand/src/db.rs      |   2 +-
 crates/hir_expand/src/hygiene.rs |   2 +-
 crates/hir_expand/src/lib.rs     |  13 ++--
 crates/mbe/src/tests/expand.rs   |   5 +-
 crates/mbe/src/token_map.rs      |  20 ++++-
 6 files changed, 99 insertions(+), 71 deletions(-)

diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 18f6015843..faf192d24e 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -2,7 +2,7 @@
 
 mod source_to_def;
 
-use std::{cell::RefCell, fmt, iter::successors};
+use std::{cell::RefCell, fmt};
 
 use base_db::{FileId, FileRange};
 use hir_def::{
@@ -14,6 +14,7 @@ use hir_expand::{name::AsName, ExpansionInfo};
 use hir_ty::{associated_type_shorthand_candidates, Interner};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
 use syntax::{
     algo::find_node_at_offset,
     ast::{self, GenericParamsOwner, LoopBodyOwner},
@@ -166,6 +167,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     }
 
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+        self.imp.descend_into_macros(token).pop().unwrap()
+    }
+
+    pub fn descend_into_macros_many(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         self.imp.descend_into_macros(token)
     }
 
@@ -440,76 +445,83 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
 
-    fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+    fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
             Some(it) => it,
-            None => return token,
+            None => return smallvec![token],
         };
         let sa = self.analyze(&parent);
-
-        let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
+        let mut queue = vec![InFile::new(sa.file_id, token)];
+        let mut res = smallvec![];
+        while let Some(token) = queue.pop() {
             self.db.unwind_if_cancelled();
-            for node in token.value.ancestors() {
-                match_ast! {
-                    match node {
-                        ast::MacroCall(macro_call) => {
-                            let tt = macro_call.token_tree()?;
-                            let l_delim = match tt.left_delimiter_token() {
-                                Some(it) => it.text_range().end(),
-                                None => tt.syntax().text_range().start()
-                            };
-                            let r_delim = match tt.right_delimiter_token() {
-                                Some(it) => it.text_range().start(),
-                                None => tt.syntax().text_range().end()
-                            };
-                            if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
-                                return None;
-                            }
-                            let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
-                            let token = self
-                                .expansion_info_cache
-                                .borrow_mut()
-                                .entry(file_id)
-                                .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                                .as_ref()?
-                                .map_token_down(self.db.upcast(), None, token.as_ref())?;
-
-                            if let Some(parent) = token.value.parent() {
-                                self.cache(find_root(&parent), token.file_id);
-                            }
-
-                            return Some(token);
-                        },
-                        ast::Item(item) => {
-                            if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
-                                let file_id = call_id.as_file();
-                                let token = self
-                                    .expansion_info_cache
-                                    .borrow_mut()
-                                    .entry(file_id)
-                                    .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                                    .as_ref()?
-                                    .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
-
-                                if let Some(parent) = token.value.parent() {
-                                    self.cache(find_root(&parent), token.file_id);
-                                }
-
-                                return Some(token);
-                            }
-                        },
-                        _ => {}
-                    }
-                }
-            }
-
-            None
-        })
-        .last()
-        .unwrap();
-        token.value
+            let mapped = (|| {
+                for node in token.value.ancestors() {
+                    match_ast! {
+                        match node {
+                            ast::MacroCall(macro_call) => {
+                                let tt = macro_call.token_tree()?;
+                                let l_delim = match tt.left_delimiter_token() {
+                                    Some(it) => it.text_range().end(),
+                                    None => tt.syntax().text_range().start()
+                                };
+                                let r_delim = match tt.right_delimiter_token() {
+                                    Some(it) => it.text_range().start(),
+                                    None => tt.syntax().text_range().end()
+                                };
+                                if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
+                                    return None;
+                                }
+                                let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
+                                let mut cache = self.expansion_info_cache.borrow_mut();
+                                let tokens = cache
+                                    .entry(file_id)
+                                    .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
+                                    .as_ref()?
+                                    .map_token_down(self.db.upcast(), None, token.as_ref())?;
+
+                                queue.extend(tokens.inspect(|token| {
+                                    if let Some(parent) = token.value.parent() {
+                                        self.cache(find_root(&parent), token.file_id);
+                                    }
+                                }));
+                                return Some(());
+                            },
+                            ast::Item(item) => {
+                                match self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
+                                    Some(call_id) => {
+                                        let file_id = call_id.as_file();
+                                        let mut cache = self.expansion_info_cache.borrow_mut();
+                                        let tokens = cache
+                                            .entry(file_id)
+                                            .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
+                                            .as_ref()?
+                                            .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
+
+                                        queue.extend(tokens.inspect(|token| {
+                                            if let Some(parent) = token.value.parent() {
+                                                self.cache(find_root(&parent), token.file_id);
+                                            }
+                                        }));
+                                        return Some(());
+                                    }
+                                    None => {}
+                                }
+                            },
+                            _ => {}
+                        }
+                    }
+                }
+                None
+            })();
+            match mapped {
+                Some(()) => (),
+                None => res.push(token.value),
+            }
+        }
+        res
     }
 
     fn descend_node_at_offset(
         &self,
@@ -518,8 +530,8 @@ impl<'db> SemanticsImpl<'db> {
     ) -> impl Iterator<Item = SyntaxNode> + '_ {
         // Handle macro token cases
         node.token_at_offset(offset)
-            .map(|token| self.descend_into_macros(token))
-            .map(|it| self.token_ancestors_with_macros(it))
+            .flat_map(move |token| self.descend_into_macros(token))
+            .map(move |it| self.token_ancestors_with_macros(it))
             .flatten()
     }
 
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index fc1dc048ef..2fc8468faf 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -163,7 +163,7 @@ pub fn expand_speculative(
         mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;
 
     let token_id = macro_def.map_id_down(token_id);
-    let range = tmap_2.range_by_token(token_id, token_to_map.kind())?;
+    let range = tmap_2.first_range_by_token(token_id, token_to_map.kind())?;
     let token = node.syntax_node().covering_element(range).into_token()?;
     Some((node.syntax_node(), token))
 }
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs
index 5deb59ae31..cac484a325 100644
--- a/crates/hir_expand/src/hygiene.rs
+++ b/crates/hir_expand/src/hygiene.rs
@@ -171,7 +171,7 @@ impl HygieneInfo {
             },
         };
 
-        let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?;
+        let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
         Some((tt.with_value(range + tt.value), origin))
     }
 }
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs
index a12dd07426..3bbbb5722f 100644
--- a/crates/hir_expand/src/lib.rs
+++ b/crates/hir_expand/src/lib.rs
@@ -368,7 +368,7 @@ impl ExpansionInfo {
         db: &dyn db::AstDatabase,
         item: Option<ast::Item>,
         token: InFile<&SyntaxToken>,
-    ) -> Option<InFile<SyntaxToken>> {
+    ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
         assert_eq!(token.file_id, self.arg.file_id);
         let token_id = if let Some(item) = item {
             let call_id = match self.expanded.file_id.0 {
@@ -411,11 +411,12 @@ impl ExpansionInfo {
             }
         };
 
-        let range = self.exp_map.range_by_token(token_id, token.value.kind())?;
+        let tokens = self
+            .exp_map
+            .ranges_by_token(token_id, token.value.kind())
+            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
 
-        let token = self.expanded.value.covering_element(range).into_token()?;
-
-        Some(self.expanded.with_value(token))
+        Some(tokens.map(move |token| self.expanded.with_value(token)))
     }
 
     pub fn map_token_up(
@@ -453,7 +454,7 @@
             },
         };
 
-        let range = token_map.range_by_token(token_id, token.value.kind())?;
+        let range = token_map.first_range_by_token(token_id, token.value.kind())?;
         let token = tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
         Some((tt.with_value(token), origin))
diff --git a/crates/mbe/src/tests/expand.rs b/crates/mbe/src/tests/expand.rs
index c788e427e7..c8d06eebb7 100644
--- a/crates/mbe/src/tests/expand.rs
+++ b/crates/mbe/src/tests/expand.rs
@@ -58,8 +58,9 @@ macro_rules! foobar {
     let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
     let content = node.syntax_node().to_string();
 
-    let get_text =
-        |id, kind| -> String { content[token_map.range_by_token(id, kind).unwrap()].to_string() };
+    let get_text = |id, kind| -> String {
+        content[token_map.first_range_by_token(id, kind).unwrap()].to_string()
+    };
 
     assert_eq!(expanded.token_trees.len(), 4);
     // {($e:ident) => { fn $e() {} }}
diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs
index ff0c106cf2..9053526d20 100644
--- a/crates/mbe/src/token_map.rs
+++ b/crates/mbe/src/token_map.rs
@@ -46,9 +46,23 @@ impl TokenMap {
         Some(token_id)
     }
 
-    pub fn range_by_token(&self, token_id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> {
-        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
-        range.by_kind(kind)
+    pub fn ranges_by_token(
+        &self,
+        token_id: tt::TokenId,
+        kind: SyntaxKind,
+    ) -> impl Iterator<Item = TextRange> + '_ {
+        self.entries
+            .iter()
+            .filter(move |&&(tid, _)| tid == token_id)
+            .filter_map(move |(_, range)| range.by_kind(kind))
+    }
+
+    pub fn first_range_by_token(
+        &self,
+        token_id: tt::TokenId,
+        kind: SyntaxKind,
+    ) -> Option<TextRange> {
+        self.ranges_by_token(token_id, kind).next()
     }
 
     pub(crate) fn shrink_to_fit(&mut self) {
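
Notes:

A single tt::TokenId can legitimately correspond to several text ranges
because a macro may paste the same input token into its expansion more
than once. A self-contained illustration (the `twice!` macro below is
hypothetical, not part of this patch):

    // The single input ident `foo` is pasted into the expansion twice,
    // so the token id recorded for it maps to two distinct ranges in
    // the expanded syntax tree.
    macro_rules! twice {
        ($name:ident) => {
            fn $name() {}  // first copy of `$name`
            mod $name {}   // second copy; fns and modules live in
                           // different namespaces, so this compiles
        };
    }

    twice!(foo);

    fn main() {
        foo(); // calls the function the macro generated
    }

Previously, TokenMap::range_by_token stopped at the first entry with a
matching id, so consumers of the map could only ever see one of those
copies.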
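The new TokenMap surface mirrors this one-to-many shape: ranges_by_token
yields every recorded range for an id (in entry order), while
first_range_by_token keeps the old single-result behaviour for callers
such as hygiene.rs and db.rs above that still want exactly one range. A
rough usage sketch, assuming a `token_map` and `token_id` obtained as in
the expand.rs test:

    // All expanded ranges for one token id.
    let ranges: Vec<TextRange> =
        token_map.ranges_by_token(token_id, SyntaxKind::IDENT).collect();

    // The convenience wrapper is just `.next()` on the same iterator,
    // so it agrees with the first element of `ranges`.
    assert_eq!(
        token_map.first_range_by_token(token_id, SyntaxKind::IDENT),
        ranges.first().copied(),
    );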
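SemanticsImpl::descend_into_macros changes shape to match: the old
iter::successors chain could follow only a single token through nested
expansions, while the new worklist loop pushes every token produced by
map_token_down back onto `queue` and keeps descending; a token that no
macro call or attribute-macro item maps any further ends up in `res`.
Semantics::descend_into_macros keeps its one-token signature by popping
the last result, and the new descend_into_macros_many exposes the full
set.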