mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 21:43:37 +00:00
Add TokenTextRange
parent 59295854f8
commit 320416d756
2 changed files with 35 additions and 10 deletions
@@ -227,7 +227,7 @@ impl ExpansionInfo {
         let token_id = self.macro_arg.1.token_by_range(range)?;
         let token_id = self.macro_def.0.map_id_down(token_id);

-        let range = self.exp_map.range_by_token(token_id)?;
+        let range = self.exp_map.range_by_token(token_id)?.range(token.value.kind())?;

         let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;

@@ -248,7 +248,7 @@ impl ExpansionInfo {
             }
         };

-        let range = token_map.range_by_token(token_id)?;
+        let range = token_map.range_by_token(token_id)?.range(token.value.kind())?;
         let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start())
             .into_token()?;
         Some((tt.with_value(token), origin))
@@ -12,11 +12,30 @@ use tt::buffer::{Cursor, TokenBuffer};
 use crate::subtree_source::SubtreeTokenSource;
 use crate::ExpandError;

+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum TokenTextRange {
+    Token(TextRange),
+    Delimiter(TextRange, TextRange),
+}
+
+impl TokenTextRange {
+    pub fn range(self, kind: SyntaxKind) -> Option<TextRange> {
+        match self {
+            TokenTextRange::Token(it) => Some(it),
+            TokenTextRange::Delimiter(open, close) => match kind {
+                T!['{'] | T!['('] | T!['['] => Some(open),
+                T!['}'] | T![')'] | T![']'] => Some(close),
+                _ => None,
+            },
+        }
+    }
+}
+
 /// Maps `tt::TokenId` to the relative range of the original token.
 #[derive(Debug, PartialEq, Eq, Default)]
 pub struct TokenMap {
     /// Maps `tt::TokenId` to the *relative* source range.
-    entries: Vec<(tt::TokenId, TextRange)>,
+    entries: Vec<(tt::TokenId, TokenTextRange)>,
 }

 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
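
The hunk above is where the new API lives: a delimiter token id now covers two pieces of text, its opening and its closing bracket, so a lookup can no longer hand back a single TextRange; callers resolve the ambiguity with the token's SyntaxKind, as the two ExpansionInfo hunks at the top do via `.range(token.value.kind())?`. The sketch below is not part of the commit; it is a self-contained illustration of that dispatch, with a simplified TextRange struct and a plain `char` standing in for rust-analyzer's SyntaxKind / T![...] machinery.

// Stand-in types: `TextRange` is a plain (start, end) pair and the token kind
// is a `char`, instead of rust-analyzer's TextRange and SyntaxKind.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TextRange {
    start: u32,
    end: u32,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TokenTextRange {
    Token(TextRange),
    Delimiter(TextRange, TextRange),
}

impl TokenTextRange {
    // Mirrors the new `range` method: an ordinary token has exactly one range,
    // while a delimiter resolves to its opening or closing bracket by kind.
    fn range(self, kind: char) -> Option<TextRange> {
        match self {
            TokenTextRange::Token(it) => Some(it),
            TokenTextRange::Delimiter(open, close) => match kind {
                '{' | '(' | '[' => Some(open),
                '}' | ')' | ']' => Some(close),
                _ => None,
            },
        }
    }
}

fn main() {
    // One id describes the whole `{ ... }` group; the two bracket ranges are
    // stored together and disambiguated at lookup time.
    let delim = TokenTextRange::Delimiter(
        TextRange { start: 0, end: 1 },   // `{`
        TextRange { start: 10, end: 11 }, // `}`
    );
    assert_eq!(delim.range('{'), Some(TextRange { start: 0, end: 1 }));
    assert_eq!(delim.range('}'), Some(TextRange { start: 10, end: 11 }));
    assert_eq!(delim.range('x'), None); // non-bracket kinds yield None

    // A plain token ignores the kind entirely.
    let ident = TokenTextRange::Token(TextRange { start: 2, end: 5 });
    assert_eq!(ident.range('x'), Some(TextRange { start: 2, end: 5 }));
}

Because non-bracket kinds yield None, the call sites above end in `?` and simply bail out when a delimiter entry cannot be resolved.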
@@ -72,26 +91,32 @@ pub fn token_tree_to_syntax_node(

 impl TokenMap {
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let &(token_id, _) = self.entries.iter().find(|(_, range)| *range == relative_range)?;
+        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
+            TokenTextRange::Token(it) => *it == relative_range,
+            TokenTextRange::Delimiter(open, close) => {
+                *open == relative_range || *close == relative_range
+            }
+        })?;
         Some(token_id)
     }

-    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
+    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
         let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
         Some(range)
     }

     fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
-        self.entries.push((token_id, relative_range));
+        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
     }

     fn insert_delim(
         &mut self,
-        _token_id: tt::TokenId,
-        _open_relative_range: TextRange,
-        _close_relative_range: TextRange,
+        token_id: tt::TokenId,
+        open_relative_range: TextRange,
+        close_relative_range: TextRange,
     ) {
-        // FIXME: Add entries for delimiter
+        self.entries
+            .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
     }
 }

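
For context only, and not part of the diff, here is a minimal stand-in version of the updated TokenMap showing the round trip this commit enables: insert_delim now records a real entry (replacing the old FIXME), token_by_range matches either bracket of a delimiter pair, and range_by_token returns a TokenTextRange for the caller to resolve by kind. tt::TokenId and TextRange are simplified to u32 and (u32, u32).

// Stand-in types: TokenId is a u32 and TextRange a (start, end) pair.
type TokenId = u32;
type TextRange = (u32, u32);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TokenTextRange {
    Token(TextRange),
    Delimiter(TextRange, TextRange),
}

#[derive(Default)]
struct TokenMap {
    entries: Vec<(TokenId, TokenTextRange)>,
}

impl TokenMap {
    // Ordinary tokens keep the old one-id-one-range behaviour.
    fn insert(&mut self, id: TokenId, range: TextRange) {
        self.entries.push((id, TokenTextRange::Token(range)));
    }

    // Delimiters now get a real entry: one id covering both bracket ranges.
    fn insert_delim(&mut self, id: TokenId, open: TextRange, close: TextRange) {
        self.entries.push((id, TokenTextRange::Delimiter(open, close)));
    }

    // A lookup by range matches either side of a delimiter pair.
    fn token_by_range(&self, range: TextRange) -> Option<TokenId> {
        self.entries
            .iter()
            .find(|(_, r)| match r {
                TokenTextRange::Token(it) => *it == range,
                TokenTextRange::Delimiter(open, close) => *open == range || *close == range,
            })
            .map(|&(id, _)| id)
    }

    // The reverse lookup hands back the whole TokenTextRange; callers pick a
    // concrete range by token kind, as the ExpansionInfo hunks above do.
    fn range_by_token(&self, id: TokenId) -> Option<TokenTextRange> {
        self.entries.iter().find(|&&(tid, _)| tid == id).map(|&(_, r)| r)
    }
}

fn main() {
    let mut map = TokenMap::default();
    map.insert(0, (2, 5)); // e.g. an identifier
    map.insert_delim(1, (0, 1), (10, 11)); // `{` and `}` share one id

    assert_eq!(map.token_by_range((0, 1)), Some(1)); // matches the `{` side
    assert_eq!(map.token_by_range((10, 11)), Some(1)); // matches the `}` side
    assert_eq!(map.token_by_range((2, 5)), Some(0));
    match map.range_by_token(1) {
        Some(TokenTextRange::Delimiter(open, close)) => {
            assert_eq!((open, close), ((0, 1), (10, 11)));
        }
        other => panic!("unexpected entry: {:?}", other),
    }
}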