mirror of
https://github.com/rust-lang/rust-analyzer
synced 2024-12-26 21:13:37 +00:00
Merge #2302
2302: Collapse TokenMap and RevTokenMap r=matklad a=matklad
Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
commit 9cc4d058d2
4 changed files with 105 additions and 119 deletions
@@ -59,10 +59,8 @@ pub trait AstDatabase: SourceDatabase {
     fn intern_macro(&self, macro_call: MacroCallLoc) -> MacroCallId;
     fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
     fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>;
-    fn parse_macro(
-        &self,
-        macro_file: MacroFile,
-    ) -> Option<(Parse<SyntaxNode>, Arc<mbe::RevTokenMap>)>;
+    fn parse_macro(&self, macro_file: MacroFile)
+        -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
     fn macro_expand(&self, macro_call: MacroCallId) -> Result<Arc<tt::Subtree>, String>;
 }

@@ -136,7 +134,7 @@ pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Optio
 pub(crate) fn parse_macro(
     db: &dyn AstDatabase,
     macro_file: MacroFile,
-) -> Option<(Parse<SyntaxNode>, Arc<mbe::RevTokenMap>)> {
+) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
     let _p = profile("parse_macro_query");

     let macro_call_id = macro_file.macro_call_id;

@@ -159,7 +159,7 @@ pub struct ExpansionInfo {

     macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
-    exp_map: Arc<mbe::RevTokenMap>,
+    exp_map: Arc<mbe::TokenMap>,
 }

 impl ExpansionInfo {

@@ -186,7 +186,7 @@ impl ExpansionInfo {
             mbe::Origin::Def => (&self.macro_def.1, &self.def),
         };

-        let range = token_map.relative_range_of(token_id)?;
+        let range = token_map.range_by_token(token_id)?;
         let token = algo::find_covering_element(
             tt.ast.syntax(),
             range + tt.ast.syntax().text_range().start(),

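A note on the renamed call: `range_by_token` hands back a range that is relative to the macro input, so it is shifted by the absolute start of the original node before the lookup in the real tree. A minimal illustration of that re-anchoring arithmetic (plain u32 offsets stand in for TextUnit/TextRange; this is not code from the commit):

    fn main() {
        // Relative range of a token inside the macro arg, as stored in the map.
        let (rel_start, rel_end) = (5u32, 8u32);
        // Absolute start of the original node: tt.ast.syntax().text_range().start().
        let node_start = 100u32;
        // `range + tt.ast.syntax().text_range().start()` above is exactly this shift.
        assert_eq!((rel_start + node_start, rel_end + node_start), (105, 108));
    }
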
@@ -31,7 +31,7 @@ pub enum ExpandError {
 }

 pub use crate::syntax_bridge::{
-    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, RevTokenMap, TokenMap,
+    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, TokenMap,
 };

 /// This struct contains AST for a single `macro_rules` definition. What might

@@ -14,30 +14,22 @@ use crate::ExpandError;
 #[derive(Debug, PartialEq, Eq, Default)]
 pub struct TokenMap {
     /// Maps `tt::TokenId` to the *relative* source range.
-    tokens: Vec<TextRange>,
-}
-
-/// Maps relative range of the expanded syntax node to `tt::TokenId`
-#[derive(Debug, PartialEq, Eq, Default)]
-pub struct RevTokenMap {
-    ranges: Vec<(TextRange, tt::TokenId)>,
+    entries: Vec<(tt::TokenId, TextRange)>,
 }

 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
 pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
-    let mut token_map = TokenMap::default();
-    let node = ast.syntax();
-    let tt = convert_tt(&mut token_map, node.text_range().start(), node)?;
-    Some((tt, token_map))
+    syntax_node_to_token_tree(ast.syntax())
 }

 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
-    let mut token_map = TokenMap::default();
-    let tt = convert_tt(&mut token_map, node.text_range().start(), node)?;
-    Some((tt, token_map))
+    let global_offset = node.text_range().start();
+    let mut c = Convertor { map: TokenMap::default(), global_offset, next_id: 0 };
+    let subtree = c.go(node)?;
+    Some((subtree, c.map))
 }

 // The following items are what `rustc` macro can be parsed into :

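Note the shape of the new entry point: instead of a free function that threads `(token_map, global_offset)` through every recursive call, the state is bundled into a `Convertor` and the recursion becomes a method (the body of `Convertor::go` appears in a later hunk). A generic sketch of this common Rust refactor pattern, not the commit's code:

    // Before: fn walk(seen: &mut Vec<u32>, offset: u32, depth: u32) { ... }
    // After: the threaded state lives in a struct and the recursion is a method.
    struct Walker {
        seen: Vec<u32>,
        offset: u32,
    }

    impl Walker {
        fn go(&mut self, depth: u32) {
            self.seen.push(self.offset + depth);
            if depth < 3 {
                self.go(depth + 1); // no state to re-pass on recursion
            }
        }
    }

    fn main() {
        let mut w = Walker { seen: Vec::new(), offset: 10 };
        w.go(0);
        assert_eq!(w.seen, vec![10, 11, 12, 13]);
    }
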
@@ -55,7 +47,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
 pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
-) -> Result<(Parse<SyntaxNode>, RevTokenMap), ExpandError> {
+) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
     let tmp;
     let tokens = match tt {
         tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(),

@@ -78,35 +70,17 @@ pub fn token_tree_to_syntax_node(

 impl TokenMap {
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let (idx, _) =
-            self.tokens.iter().enumerate().find(|(_, range)| **range == relative_range)?;
-        Some(tt::TokenId(idx as u32))
-    }
-
-    pub fn relative_range_of(&self, token_id: tt::TokenId) -> Option<TextRange> {
-        let idx = token_id.0 as usize;
-        self.tokens.get(idx).copied()
-    }
-
-    fn alloc(&mut self, relative_range: TextRange) -> tt::TokenId {
-        let id = self.tokens.len();
-        self.tokens.push(relative_range);
-        tt::TokenId(id as u32)
-    }
-}
-
-impl RevTokenMap {
-    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        self.ranges.iter().find(|&it| it.0 == relative_range).map(|it| it.1)
+        let &(token_id, _) = self.entries.iter().find(|(_, range)| *range == relative_range)?;
+        Some(token_id)
     }

     pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
-        let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
-        Some(r)
+        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
+        Some(range)
     }

-    fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
-        self.ranges.push((relative_range, token_id.clone()))
+    fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
+        self.entries.push((token_id, relative_range));
     }
 }

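This hunk is the heart of the collapse: a single `Vec<(tt::TokenId, TextRange)>` now answers both questions, replacing the index-based `TokenMap` (where the id *was* the vector index) and the pair-based `RevTokenMap`. Both lookups are linear scans, which is cheap at the typical size of a single macro invocation. A self-contained sketch of the resulting data structure (stand-in types, not the rust-analyzer definitions):

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct TokenId(u32);

    // Stand-in for ra_syntax::TextRange: a (start, end) pair of offsets.
    type TextRange = (u32, u32);

    #[derive(Default)]
    struct TokenMap {
        entries: Vec<(TokenId, TextRange)>,
    }

    impl TokenMap {
        // Forward lookup: which id was minted for this relative source range?
        fn token_by_range(&self, range: TextRange) -> Option<TokenId> {
            self.entries.iter().find(|(_, r)| *r == range).map(|&(id, _)| id)
        }
        // Reverse lookup: which relative range does this id map back to?
        fn range_by_token(&self, id: TokenId) -> Option<TextRange> {
            self.entries.iter().find(|(i, _)| *i == id).map(|&(_, r)| r)
        }
        fn insert(&mut self, id: TokenId, range: TextRange) {
            self.entries.push((id, range));
        }
    }

    fn main() {
        let mut map = TokenMap::default();
        map.insert(TokenId(0), (5, 8));
        assert_eq!(map.token_by_range((5, 8)), Some(TokenId(0)));
        assert_eq!(map.range_by_token(TokenId(0)), Some((5, 8)));
    }
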
@@ -171,84 +145,98 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
     }
 }

-fn convert_tt(
-    token_map: &mut TokenMap,
-    global_offset: TextUnit,
-    tt: &SyntaxNode,
-) -> Option<tt::Subtree> {
-    // This tree is empty
-    if tt.first_child_or_token().is_none() {
-        return Some(tt::Subtree { token_trees: vec![], delimiter: tt::Delimiter::None });
-    }
-
-    let first_child = tt.first_child_or_token()?;
-    let last_child = tt.last_child_or_token()?;
-    let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
-        (T!['('], T![')']) => (tt::Delimiter::Parenthesis, true),
-        (T!['{'], T!['}']) => (tt::Delimiter::Brace, true),
-        (T!['['], T![']']) => (tt::Delimiter::Bracket, true),
-        _ => (tt::Delimiter::None, false),
-    };
-
-    let mut token_trees = Vec::new();
-    let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
-
-    while let Some(child) = child_iter.next() {
-        if skip_first && (child == first_child || child == last_child) {
-            continue;
-        }
-
-        match child {
-            NodeOrToken::Token(token) => {
-                if let Some(doc_tokens) = convert_doc_comment(&token) {
-                    token_trees.extend(doc_tokens);
-                } else if token.kind().is_trivia() {
-                    continue;
-                } else if token.kind().is_punct() {
-                    assert!(token.text().len() == 1, "Input ast::token punct must be single char.");
-                    let char = token.text().chars().next().unwrap();
-
-                    let spacing = match child_iter.peek() {
-                        Some(NodeOrToken::Token(token)) => {
-                            if token.kind().is_punct() {
-                                tt::Spacing::Joint
-                            } else {
-                                tt::Spacing::Alone
-                            }
-                        }
-                        _ => tt::Spacing::Alone,
-                    };
-
-                    token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
-                } else {
-                    let child: tt::TokenTree =
-                        if token.kind() == T![true] || token.kind() == T![false] {
-                            tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
-                        } else if token.kind().is_keyword()
-                            || token.kind() == IDENT
-                            || token.kind() == LIFETIME
-                        {
-                            let relative_range = token.text_range() - global_offset;
-                            let id = token_map.alloc(relative_range);
-                            let text = token.text().clone();
-                            tt::Leaf::from(tt::Ident { text, id }).into()
-                        } else if token.kind().is_literal() {
-                            tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
-                        } else {
-                            return None;
-                        };
-                    token_trees.push(child);
-                }
-            }
-            NodeOrToken::Node(node) => {
-                let child = convert_tt(token_map, global_offset, &node)?.into();
-                token_trees.push(child);
-            }
-        };
-    }
-
-    let res = tt::Subtree { delimiter, token_trees };
-    Some(res)
-}
+struct Convertor {
+    map: TokenMap,
+    global_offset: TextUnit,
+    next_id: u32,
+}
+
+impl Convertor {
+    fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> {
+        // This tree is empty
+        if tt.first_child_or_token().is_none() {
+            return Some(tt::Subtree { token_trees: vec![], delimiter: tt::Delimiter::None });
+        }
+
+        let first_child = tt.first_child_or_token()?;
+        let last_child = tt.last_child_or_token()?;
+        let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
+            (T!['('], T![')']) => (tt::Delimiter::Parenthesis, true),
+            (T!['{'], T!['}']) => (tt::Delimiter::Brace, true),
+            (T!['['], T![']']) => (tt::Delimiter::Bracket, true),
+            _ => (tt::Delimiter::None, false),
+        };
+
+        let mut token_trees = Vec::new();
+        let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
+
+        while let Some(child) = child_iter.next() {
+            if skip_first && (child == first_child || child == last_child) {
+                continue;
+            }
+
+            match child {
+                NodeOrToken::Token(token) => {
+                    if let Some(doc_tokens) = convert_doc_comment(&token) {
+                        token_trees.extend(doc_tokens);
+                    } else if token.kind().is_trivia() {
+                        continue;
+                    } else if token.kind().is_punct() {
+                        assert!(
+                            token.text().len() == 1,
+                            "Input ast::token punct must be single char."
+                        );
+                        let char = token.text().chars().next().unwrap();
+
+                        let spacing = match child_iter.peek() {
+                            Some(NodeOrToken::Token(token)) => {
+                                if token.kind().is_punct() {
+                                    tt::Spacing::Joint
+                                } else {
+                                    tt::Spacing::Alone
+                                }
+                            }
+                            _ => tt::Spacing::Alone,
+                        };
+
+                        token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
+                    } else {
+                        let child: tt::TokenTree =
+                            if token.kind() == T![true] || token.kind() == T![false] {
+                                tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
+                            } else if token.kind().is_keyword()
+                                || token.kind() == IDENT
+                                || token.kind() == LIFETIME
+                            {
+                                let id = self.alloc(token.text_range());
+                                let text = token.text().clone();
+                                tt::Leaf::from(tt::Ident { text, id }).into()
+                            } else if token.kind().is_literal() {
+                                tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
+                            } else {
+                                return None;
+                            };
+                        token_trees.push(child);
+                    }
+                }
+                NodeOrToken::Node(node) => {
+                    let child = self.go(&node)?.into();
+                    token_trees.push(child);
+                }
+            };
+        }
+
+        let res = tt::Subtree { delimiter, token_trees };
+        Some(res)
+    }
+
+    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
+        let relative_range = absolute_range - self.global_offset;
+        let token_id = tt::TokenId(self.next_id);
+        self.next_id += 1;
+        self.map.insert(token_id, relative_range);
+        token_id
+    }
+}

 struct TtTreeSink<'a> {

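With ids no longer doubling as vector indices, `alloc` has to mint them explicitly: `next_id` is a plain counter, and each minted id is stored alongside the token's range made relative to `global_offset`. A toy version of the allocation step (integer pairs as stand-in ranges; not the commit's code):

    struct Convertor {
        map: Vec<(u32, (u32, u32))>, // (token id, relative range)
        global_offset: u32,          // start offset of the node being converted
        next_id: u32,
    }

    impl Convertor {
        fn alloc(&mut self, absolute_range: (u32, u32)) -> u32 {
            // Make the range relative to the subtree start, as the real alloc does.
            let relative =
                (absolute_range.0 - self.global_offset, absolute_range.1 - self.global_offset);
            let id = self.next_id;
            self.next_id += 1;
            self.map.push((id, relative));
            id
        }
    }

    fn main() {
        let mut c = Convertor { map: Vec::new(), global_offset: 10, next_id: 0 };
        assert_eq!(c.alloc((15, 18)), 0); // the first identifier gets id 0
        assert_eq!(c.alloc((20, 23)), 1); // ids stay dense and monotonically increasing
        assert_eq!(c.map, vec![(0, (5, 8)), (1, (10, 13))]);
    }
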
@@ -256,7 +244,7 @@ struct TtTreeSink<'a> {
     cursor: Cursor<'a>,
     text_pos: TextUnit,
     inner: SyntaxTreeBuilder,
-    range_map: RevTokenMap,
+    token_map: TokenMap,

     // Number of roots
     // Use for detect ill-form tree which is not single root

@@ -271,12 +259,12 @@ impl<'a> TtTreeSink<'a> {
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
-            range_map: RevTokenMap::default(),
+            token_map: TokenMap::default(),
         }
     }

-    fn finish(self) -> (Parse<SyntaxNode>, RevTokenMap) {
-        (self.inner.finish(), self.range_map)
+    fn finish(self) -> (Parse<SyntaxNode>, TokenMap) {
+        (self.inner.finish(), self.token_map)
     }
 }

@@ -312,7 +300,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
                 if kind == IDENT {
                     let range =
                         TextRange::offset_len(self.text_pos, TextUnit::of_str(&ident.text));
-                    self.range_map.add(range, ident.id);
+                    self.token_map.insert(ident.id, range);
                 }
             }

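End to end, this renamed `insert` is the write half of the same map type that lowering fills: the Convertor records where each id came from in the macro input, and TtTreeSink records where each id landed in the expansion output. Mapping a position across an expansion is then two lookups joined on the id. A self-contained sketch of that composition (toy types; the real maps live in ra_mbe):

    type Range = (u32, u32);

    // Lookups over (token id, range) pairs; arg_map is filled while lowering the
    // macro input, exp_map while building the expansion output in TtTreeSink.
    fn token_by_range(map: &[(u32, Range)], range: Range) -> Option<u32> {
        map.iter().find(|(_, r)| *r == range).map(|&(id, _)| id)
    }

    fn range_by_token(map: &[(u32, Range)], id: u32) -> Option<Range> {
        map.iter().find(|(i, _)| *i == id).map(|&(_, r)| r)
    }

    fn main() {
        // Lowering saw an identifier at (4, 7) in the macro arg and minted id 0.
        let arg_map = [(0u32, (4u32, 7u32))];
        // TtTreeSink emitted the same identifier at (12, 15) in the expansion.
        let exp_map = [(0u32, (12u32, 15u32))];
        // Mapping a source range across the expansion: range -> id -> range.
        let id = token_by_range(&arg_map, (4, 7)).unwrap();
        assert_eq!(range_by_token(&exp_map, id), Some((12, 15)));
    }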