diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs
index 8369c2b40f..72c728c938 100644
--- a/crates/ra_hir_expand/src/db.rs
+++ b/crates/ra_hir_expand/src/db.rs
@@ -151,10 +151,15 @@ pub(crate) fn parse_macro_with_info(

     let arg_start = arg_tt.map(|t| t.syntax().text_range().start());
     let def_start = def_tt.map(|t| t.syntax().text_range().start());

-    let arg_map =
-        arg_start.map(|start| exp_map.ranges(&expand_info.arg_map, start)).unwrap_or_default();
-    let def_map =
-        def_start.map(|start| exp_map.ranges(&expand_info.def_map, start)).unwrap_or_default();
+    let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition").ok()?.0;
+    let shift = macro_rules.shift();
+
+    let arg_map = arg_start
+        .map(|start| exp_map.map_ranges(&expand_info.arg_map, start, shift))
+        .unwrap_or_default();
+    let def_map = def_start
+        .map(|start| exp_map.map_ranges(&expand_info.def_map, start, 0))
+        .unwrap_or_default();

     let info = ExpansionInfo { arg_map, def_map };
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 2926b29fd0..a0f1a68813 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -118,6 +118,10 @@ impl MacroRules {
         shift_subtree(&mut tt, self.shift);
         mbe_expander::expand(self, &tt)
     }
+
+    pub fn shift(&self) -> u32 {
+        self.shift
+    }
 }

 impl Rule {
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 2f21ad6e0a..0c499cd03c 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -12,31 +12,12 @@ use tt::buffer::{Cursor, TokenBuffer};

 use crate::subtree_source::SubtreeTokenSource;
 use crate::ExpandError;
-use std::sync::atomic::{AtomicU32, Ordering};

 /// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Debug, PartialEq, Eq, Default)]
 pub struct TokenMap {
     /// Maps `tt::TokenId` to the *relative* source range.
     tokens: Vec<TextRange>,
-    map_id: u32,
-}
-
-static TOKEN_MAP_COUNTER: AtomicU32 = AtomicU32::new(0);
-
-/// Generate an unique token map id for each instance
-fn make_uniq_token_map_id() -> u32 {
-    let res = TOKEN_MAP_COUNTER.fetch_add(1, Ordering::SeqCst);
-    if res == std::u32::MAX {
-        panic!("TOKEN_MAP_COUNTER is overflowed");
-    }
-    res
-}
-
-impl std::default::Default for TokenMap {
-    fn default() -> TokenMap {
-        TokenMap { tokens: Default::default(), map_id: make_uniq_token_map_id() }
-    }
 }

 /// Maps relative range of the expanded syntax node to `tt::TokenId`
@@ -140,17 +121,14 @@ pub fn token_tree_to_items(

 impl TokenMap {
     pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
-        if self.map_id != tt.map_id() {
-            return None;
-        }
-        let idx = tt.token_id() as usize;
+        let idx = tt.0 as usize;
         self.tokens.get(idx).copied()
     }

     fn alloc(&mut self, relative_range: TextRange) -> tt::TokenId {
         let id = self.tokens.len();
         self.tokens.push(relative_range);
-        tt::TokenId::new(id as u32, self.map_id)
+        tt::TokenId(id as u32)
     }
 }

@@ -159,14 +137,17 @@ impl ExpandedRangeMap {
         self.ranges.push((relative_range, token_id.clone()))
     }

-    pub fn ranges(&self, to: &TokenMap, start: TextUnit) -> Vec<(TextRange, TextRange)> {
+    pub fn map_ranges(
+        &self,
+        to: &TokenMap,
+        start: TextUnit,
+        shift: u32,
+    ) -> Vec<(TextRange, TextRange)> {
         self.ranges
             .iter()
+            .filter_map(|(r, tid)| if shift <= tid.0 { Some((r, tid.0 - shift)) } else { None })
             .filter_map(|(r, tid)| {
-                if to.map_id != tid.map_id() {
-                    return None;
-                }
-                if let Some(to_range) = to.relative_range_of(*tid) {
+                if let Some(to_range) = to.relative_range_of(tt::TokenId(tid)) {
                     Some((*r, TextRange::from_to(to_range.start() + start, to_range.end() + start)))
                 } else {
                     None
diff --git a/crates/ra_tt/src/lib.rs b/crates/ra_tt/src/lib.rs
index 1449618f3e..20c251ff45 100644
--- a/crates/ra_tt/src/lib.rs
+++ b/crates/ra_tt/src/lib.rs
@@ -25,26 +25,11 @@ use smol_str::SmolStr;
 /// source token and making sure that identities are preserved during macro
 /// expansion.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TokenId {
-    token_id: u32,
-    map_id: u32,
-}
+pub struct TokenId(pub u32);

 impl TokenId {
-    pub fn new(token_id: u32, map_id: u32) -> TokenId {
-        TokenId { token_id, map_id }
-    }
-
     pub const fn unspecified() -> TokenId {
-        TokenId { token_id: !0, map_id: !0 }
-    }
-
-    pub fn token_id(&self) -> u32 {
-        self.token_id
-    }
-
-    pub fn map_id(&self) -> u32 {
-        self.map_id
+        TokenId(!0)
     }
 }
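
Note on the approach (commentary, not part of the patch): instead of tagging every TokenId with a map_id, the argument's token ids are shifted above the ids already used by the macro definition, and MacroRules::shift() exposes that offset so map_ranges can subtract it again before looking ids up in the argument's TokenMap; ids below the shift belong to the definition and are dropped, which is why the def_map lookup in parse_macro_with_info passes a shift of 0. Below is a minimal standalone sketch of that idea; TokenId, TokenMap, and map_back here are simplified local stand-ins (plain (start, end) offsets for ranges), not the real ra_tt / ra_mbe types.

// Standalone sketch of shift-based token id mapping (simplified types).

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TokenId(u32);

#[derive(Default)]
struct TokenMap {
    // Index into this vec == TokenId allocated for the source token.
    tokens: Vec<(u32, u32)>,
}

impl TokenMap {
    fn alloc(&mut self, range: (u32, u32)) -> TokenId {
        let id = self.tokens.len() as u32;
        self.tokens.push(range);
        TokenId(id)
    }

    fn relative_range_of(&self, id: TokenId) -> Option<(u32, u32)> {
        self.tokens.get(id.0 as usize).copied()
    }
}

// Map a token id seen in the expansion back to a range in the macro call's
// argument. Ids below `shift` came from the macro definition and are skipped;
// the rest are un-shifted before the lookup, mirroring the filter in the new
// `map_ranges`.
fn map_back(arg_map: &TokenMap, id: TokenId, shift: u32) -> Option<(u32, u32)> {
    let unshifted = id.0.checked_sub(shift)?;
    arg_map.relative_range_of(TokenId(unshifted))
}

fn main() {
    // Pretend the macro definition already used ids 0..3, so the argument's
    // ids were shifted up by 3 when the call was expanded.
    let shift = 3;

    let mut arg_map = TokenMap::default();
    assert_eq!(arg_map.alloc((0, 3)), TokenId(0)); // first argument token
    assert_eq!(arg_map.alloc((4, 7)), TokenId(1)); // second argument token

    // Ids as they appear in the expanded tree:
    assert_eq!(map_back(&arg_map, TokenId(1), shift), None); // definition token, dropped
    assert_eq!(map_back(&arg_map, TokenId(3), shift), Some((0, 3)));
    assert_eq!(map_back(&arg_map, TokenId(4), shift), Some((4, 7)));
}

Compared with the map_id scheme this keeps TokenId a plain u32, so ids survive round-trips through token trees without carrying extra identity state or a global counter.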