mirror of
https://github.com/rust-lang/rust-analyzer
synced 2025-01-11 20:58:54 +00:00
Upgrade rowan
Notably, new rowan comes with support for mutable syntax trees.
This commit is contained in:
parent
62ec04bbd5
commit
f5a81ec468
41 changed files with 376 additions and 176 deletions
6
Cargo.lock
generated
6
Cargo.lock
generated
|
@ -1,7 +1,5 @@
|
||||||
# This file is automatically @generated by Cargo.
|
# This file is automatically @generated by Cargo.
|
||||||
# It is not intended for manual editing.
|
# It is not intended for manual editing.
|
||||||
version = 3
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "addr2line"
|
name = "addr2line"
|
||||||
version = "0.14.1"
|
version = "0.14.1"
|
||||||
|
@ -1326,9 +1324,9 @@ checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rowan"
|
name = "rowan"
|
||||||
version = "0.12.6"
|
version = "0.13.0-pre.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a1b36e449f3702f3b0c821411db1cbdf30fb451726a9456dce5dabcd44420043"
|
checksum = "8f300be7fa17c3fa563d2bc6ab5b1a8d5163162f9111599eda4f86a563714724"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"countme",
|
"countme",
|
||||||
"hashbrown",
|
"hashbrown",
|
||||||
|
|
|
@ -143,6 +143,12 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
||||||
self.imp.diagnostics_display_range(diagnostics)
|
self.imp.diagnostics_display_range(diagnostics)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn token_ancestors_with_macros(
|
||||||
|
&self,
|
||||||
|
token: SyntaxToken,
|
||||||
|
) -> impl Iterator<Item = SyntaxNode> + '_ {
|
||||||
|
token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
|
||||||
|
}
|
||||||
pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
|
pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
|
||||||
self.imp.ancestors_with_macros(node)
|
self.imp.ancestors_with_macros(node)
|
||||||
}
|
}
|
||||||
|
@ -270,8 +276,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
|
||||||
self.imp.scope(node)
|
self.imp.scope(node)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
|
pub fn scope_at_offset(&self, token: &SyntaxToken, offset: TextSize) -> SemanticsScope<'db> {
|
||||||
self.imp.scope_at_offset(node, offset)
|
self.imp.scope_at_offset(&token.parent().unwrap(), offset)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
|
pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
|
||||||
|
@ -341,7 +347,10 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
|
|
||||||
fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
|
fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
|
||||||
let _p = profile::span("descend_into_macros");
|
let _p = profile::span("descend_into_macros");
|
||||||
let parent = token.parent();
|
let parent = match token.parent() {
|
||||||
|
Some(it) => it,
|
||||||
|
None => return token,
|
||||||
|
};
|
||||||
let sa = self.analyze(&parent);
|
let sa = self.analyze(&parent);
|
||||||
|
|
||||||
let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
|
let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
|
||||||
|
@ -360,7 +369,9 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
.as_ref()?
|
.as_ref()?
|
||||||
.map_token_down(token.as_ref())?;
|
.map_token_down(token.as_ref())?;
|
||||||
|
|
||||||
self.cache(find_root(&token.value.parent()), token.file_id);
|
if let Some(parent) = token.value.parent() {
|
||||||
|
self.cache(find_root(&parent), token.file_id);
|
||||||
|
}
|
||||||
|
|
||||||
Some(token)
|
Some(token)
|
||||||
})
|
})
|
||||||
|
@ -378,7 +389,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
// Handle macro token cases
|
// Handle macro token cases
|
||||||
node.token_at_offset(offset)
|
node.token_at_offset(offset)
|
||||||
.map(|token| self.descend_into_macros(token))
|
.map(|token| self.descend_into_macros(token))
|
||||||
.map(|it| self.ancestors_with_macros(it.parent()))
|
.map(|it| self.token_ancestors_with_macros(it))
|
||||||
.flatten()
|
.flatten()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -394,6 +405,13 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
src.with_value(&node).original_file_range(self.db.upcast())
|
src.with_value(&node).original_file_range(self.db.upcast())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn token_ancestors_with_macros(
|
||||||
|
&self,
|
||||||
|
token: SyntaxToken,
|
||||||
|
) -> impl Iterator<Item = SyntaxNode> + '_ {
|
||||||
|
token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
|
||||||
|
}
|
||||||
|
|
||||||
fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
|
fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
|
||||||
let node = self.find_file(node);
|
let node = self.find_file(node);
|
||||||
node.ancestors_with_macros(self.db.upcast()).map(|it| it.value)
|
node.ancestors_with_macros(self.db.upcast()).map(|it| it.value)
|
||||||
|
@ -405,7 +423,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
offset: TextSize,
|
offset: TextSize,
|
||||||
) -> impl Iterator<Item = SyntaxNode> + '_ {
|
) -> impl Iterator<Item = SyntaxNode> + '_ {
|
||||||
node.token_at_offset(offset)
|
node.token_at_offset(offset)
|
||||||
.map(|token| self.ancestors_with_macros(token.parent()))
|
.map(|token| self.token_ancestors_with_macros(token))
|
||||||
.kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
|
.kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -510,7 +510,10 @@ impl InFile<SyntaxToken> {
|
||||||
self,
|
self,
|
||||||
db: &dyn db::AstDatabase,
|
db: &dyn db::AstDatabase,
|
||||||
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
|
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
|
||||||
self.map(|it| it.parent()).ancestors_with_macros(db)
|
self.value
|
||||||
|
.parent()
|
||||||
|
.into_iter()
|
||||||
|
.flat_map(move |parent| InFile::new(self.file_id, parent).ancestors_with_macros(db))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -53,10 +53,8 @@ pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Optio
|
||||||
for (r_range, _) in references {
|
for (r_range, _) in references {
|
||||||
let token = file.token_at_offset(r_range.start()).next()?;
|
let token = file.token_at_offset(r_range.start()).next()?;
|
||||||
let token = sema.descend_into_macros(token);
|
let token = sema.descend_into_macros(token);
|
||||||
let syntax = token.parent();
|
|
||||||
|
|
||||||
// This target is the containing function
|
// This target is the containing function
|
||||||
if let Some(nav) = syntax.ancestors().find_map(|node| {
|
if let Some(nav) = token.ancestors().find_map(|node| {
|
||||||
let fn_ = ast::Fn::cast(node)?;
|
let fn_ = ast::Fn::cast(node)?;
|
||||||
let def = sema.to_def(&fn_)?;
|
let def = sema.to_def(&fn_)?;
|
||||||
def.try_to_nav(sema.db)
|
def.try_to_nav(sema.db)
|
||||||
|
@ -77,12 +75,13 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
|
||||||
let file = file.syntax();
|
let file = file.syntax();
|
||||||
let token = file.token_at_offset(position.offset).next()?;
|
let token = file.token_at_offset(position.offset).next()?;
|
||||||
let token = sema.descend_into_macros(token);
|
let token = sema.descend_into_macros(token);
|
||||||
let syntax = token.parent();
|
|
||||||
|
|
||||||
let mut calls = CallLocations::default();
|
let mut calls = CallLocations::default();
|
||||||
|
|
||||||
syntax
|
token
|
||||||
.descendants()
|
.parent()
|
||||||
|
.into_iter()
|
||||||
|
.flat_map(|it| it.descendants())
|
||||||
.filter_map(|node| FnCallNode::with_node_exact(&node))
|
.filter_map(|node| FnCallNode::with_node_exact(&node))
|
||||||
.filter_map(|call_node| {
|
.filter_map(|call_node| {
|
||||||
let name_ref = call_node.name_ref()?;
|
let name_ref = call_node.name_ref()?;
|
||||||
|
|
|
@ -279,7 +279,7 @@ pub(crate) fn external_docs(
|
||||||
let token = pick_best(file.token_at_offset(position.offset))?;
|
let token = pick_best(file.token_at_offset(position.offset))?;
|
||||||
let token = sema.descend_into_macros(token);
|
let token = sema.descend_into_macros(token);
|
||||||
|
|
||||||
let node = token.parent();
|
let node = token.parent()?;
|
||||||
let definition = match_ast! {
|
let definition = match_ast! {
|
||||||
match node {
|
match node {
|
||||||
ast::NameRef(name_ref) => NameRefClass::classify(&sema, &name_ref).map(|d| d.referenced(sema.db)),
|
ast::NameRef(name_ref) => NameRefClass::classify(&sema, &name_ref).map(|d| d.referenced(sema.db)),
|
||||||
|
|
|
@ -88,7 +88,7 @@ fn try_extend_selection(
|
||||||
return Some(range);
|
return Some(range);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
token.parent()
|
token.parent()?
|
||||||
}
|
}
|
||||||
NodeOrToken::Node(node) => node,
|
NodeOrToken::Node(node) => node,
|
||||||
};
|
};
|
||||||
|
@ -142,7 +142,8 @@ fn extend_tokens_from_range(
|
||||||
let extended = {
|
let extended = {
|
||||||
let fst_expanded = sema.descend_into_macros(first_token.clone());
|
let fst_expanded = sema.descend_into_macros(first_token.clone());
|
||||||
let lst_expanded = sema.descend_into_macros(last_token.clone());
|
let lst_expanded = sema.descend_into_macros(last_token.clone());
|
||||||
let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?;
|
let mut lca =
|
||||||
|
algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
|
||||||
lca = shallowest_node(&lca);
|
lca = shallowest_node(&lca);
|
||||||
if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
|
if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
|
||||||
lca = lca.parent()?;
|
lca = lca.parent()?;
|
||||||
|
@ -151,9 +152,13 @@ fn extend_tokens_from_range(
|
||||||
};
|
};
|
||||||
|
|
||||||
// Compute parent node range
|
// Compute parent node range
|
||||||
let validate = |token: &SyntaxToken| {
|
let validate = |token: &SyntaxToken| -> bool {
|
||||||
let expanded = sema.descend_into_macros(token.clone());
|
let expanded = sema.descend_into_macros(token.clone());
|
||||||
algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended)
|
let parent = match expanded.parent() {
|
||||||
|
Some(it) => it,
|
||||||
|
None => return false,
|
||||||
|
};
|
||||||
|
algo::least_common_ancestor(&extended, &parent).as_ref() == Some(&extended)
|
||||||
};
|
};
|
||||||
|
|
||||||
// Find the first and last text range under expanded parent
|
// Find the first and last text range under expanded parent
|
||||||
|
|
|
@ -30,7 +30,7 @@ pub(crate) fn goto_definition(
|
||||||
let file = sema.parse(position.file_id).syntax().clone();
|
let file = sema.parse(position.file_id).syntax().clone();
|
||||||
let original_token = pick_best(file.token_at_offset(position.offset))?;
|
let original_token = pick_best(file.token_at_offset(position.offset))?;
|
||||||
let token = sema.descend_into_macros(original_token.clone());
|
let token = sema.descend_into_macros(original_token.clone());
|
||||||
let parent = token.parent();
|
let parent = token.parent()?;
|
||||||
if let Some(comment) = ast::Comment::cast(token) {
|
if let Some(comment) = ast::Comment::cast(token) {
|
||||||
let nav = def_for_doc_comment(&sema, position, &comment)?.try_to_nav(db)?;
|
let nav = def_for_doc_comment(&sema, position, &comment)?.try_to_nav(db)?;
|
||||||
return Some(RangeInfo::new(original_token.text_range(), vec![nav]));
|
return Some(RangeInfo::new(original_token.text_range(), vec![nav]));
|
||||||
|
@ -63,7 +63,7 @@ fn def_for_doc_comment(
|
||||||
position: FilePosition,
|
position: FilePosition,
|
||||||
doc_comment: &ast::Comment,
|
doc_comment: &ast::Comment,
|
||||||
) -> Option<hir::ModuleDef> {
|
) -> Option<hir::ModuleDef> {
|
||||||
let parent = doc_comment.syntax().parent();
|
let parent = doc_comment.syntax().parent()?;
|
||||||
let (link, ns) = extract_positioned_link_from_comment(position, doc_comment)?;
|
let (link, ns) = extract_positioned_link_from_comment(position, doc_comment)?;
|
||||||
|
|
||||||
let def = doc_owner_to_def(sema, parent)?;
|
let def = doc_owner_to_def(sema, parent)?;
|
||||||
|
|
|
@ -22,7 +22,7 @@ pub(crate) fn goto_type_definition(
|
||||||
let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?;
|
let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?;
|
||||||
let token: SyntaxToken = sema.descend_into_macros(token);
|
let token: SyntaxToken = sema.descend_into_macros(token);
|
||||||
|
|
||||||
let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| {
|
let (ty, node) = sema.token_ancestors_with_macros(token).find_map(|node| {
|
||||||
let ty = match_ast! {
|
let ty = match_ast! {
|
||||||
match node {
|
match node {
|
||||||
ast::Expr(it) => sema.type_of_expr(&it)?,
|
ast::Expr(it) => sema.type_of_expr(&it)?,
|
||||||
|
|
|
@ -92,7 +92,7 @@ pub(crate) fn hover(
|
||||||
|
|
||||||
let mut res = HoverResult::default();
|
let mut res = HoverResult::default();
|
||||||
|
|
||||||
let node = token.parent();
|
let node = token.parent()?;
|
||||||
let definition = match_ast! {
|
let definition = match_ast! {
|
||||||
match node {
|
match node {
|
||||||
// we don't use NameClass::referenced_or_defined here as we do not want to resolve
|
// we don't use NameClass::referenced_or_defined here as we do not want to resolve
|
||||||
|
@ -438,7 +438,7 @@ fn hover_for_keyword(
|
||||||
if !token.kind().is_keyword() {
|
if !token.kind().is_keyword() {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()).krate());
|
let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()?).krate());
|
||||||
// std exposes {}_keyword modules with docstrings on the root to document keywords
|
// std exposes {}_keyword modules with docstrings on the root to document keywords
|
||||||
let keyword_mod = format!("{}_keyword", token.text());
|
let keyword_mod = format!("{}_keyword", token.text());
|
||||||
let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
|
let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
|
||||||
|
|
|
@ -32,29 +32,35 @@ pub(crate) fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
|
||||||
range
|
range
|
||||||
};
|
};
|
||||||
|
|
||||||
let node = match file.syntax().covering_element(range) {
|
|
||||||
NodeOrToken::Node(node) => node,
|
|
||||||
NodeOrToken::Token(token) => token.parent(),
|
|
||||||
};
|
|
||||||
let mut edit = TextEdit::builder();
|
let mut edit = TextEdit::builder();
|
||||||
for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
|
match file.syntax().covering_element(range) {
|
||||||
let range = match range.intersect(token.text_range()) {
|
NodeOrToken::Node(node) => {
|
||||||
Some(range) => range,
|
for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
|
||||||
None => continue,
|
remove_newlines(&mut edit, &token, range)
|
||||||
} - token.text_range().start();
|
|
||||||
let text = token.text();
|
|
||||||
for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
|
|
||||||
let pos: TextSize = (pos as u32).into();
|
|
||||||
let offset = token.text_range().start() + range.start() + pos;
|
|
||||||
if !edit.invalidates_offset(offset) {
|
|
||||||
remove_newline(&mut edit, &token, offset);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
NodeOrToken::Token(token) => remove_newlines(&mut edit, &token, range),
|
||||||
|
};
|
||||||
edit.finish()
|
edit.finish()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn remove_newlines(edit: &mut TextEditBuilder, token: &SyntaxToken, range: TextRange) {
|
||||||
|
let intersection = match range.intersect(token.text_range()) {
|
||||||
|
Some(range) => range,
|
||||||
|
None => return,
|
||||||
|
};
|
||||||
|
|
||||||
|
let range = intersection - token.text_range().start();
|
||||||
|
let text = token.text();
|
||||||
|
for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
|
||||||
|
let pos: TextSize = (pos as u32).into();
|
||||||
|
let offset = token.text_range().start() + range.start() + pos;
|
||||||
|
if !edit.invalidates_offset(offset) {
|
||||||
|
remove_newline(edit, &token, offset);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) {
|
fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) {
|
||||||
if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
|
if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
|
||||||
let mut string_open_quote = false;
|
let mut string_open_quote = false;
|
||||||
|
@ -148,7 +154,7 @@ fn has_comma_after(node: &SyntaxNode) -> bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
|
fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
|
||||||
let block_expr = ast::BlockExpr::cast(token.parent())?;
|
let block_expr = ast::BlockExpr::cast(token.parent()?)?;
|
||||||
if !block_expr.is_standalone() {
|
if !block_expr.is_standalone() {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
@ -170,7 +176,7 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Op
|
||||||
}
|
}
|
||||||
|
|
||||||
fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
|
fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
|
||||||
let use_tree_list = ast::UseTreeList::cast(token.parent())?;
|
let use_tree_list = ast::UseTreeList::cast(token.parent()?)?;
|
||||||
let (tree,) = use_tree_list.use_trees().collect_tuple()?;
|
let (tree,) = use_tree_list.use_trees().collect_tuple()?;
|
||||||
edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string());
|
edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string());
|
||||||
Some(())
|
Some(())
|
||||||
|
|
|
@ -25,7 +25,7 @@ pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<Text
|
||||||
Some((node, idx))
|
Some((node, idx))
|
||||||
})
|
})
|
||||||
.next()?;
|
.next()?;
|
||||||
let parent = brace_token.parent();
|
let parent = brace_token.parent()?;
|
||||||
if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) {
|
if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) {
|
||||||
cov_mark::hit!(pipes_not_braces);
|
cov_mark::hit!(pipes_not_braces);
|
||||||
return None;
|
return None;
|
||||||
|
|
|
@ -148,14 +148,15 @@ fn decl_access(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> Optio
|
||||||
|
|
||||||
fn get_name_of_item_declaration(syntax: &SyntaxNode, position: FilePosition) -> Option<ast::Name> {
|
fn get_name_of_item_declaration(syntax: &SyntaxNode, position: FilePosition) -> Option<ast::Name> {
|
||||||
let token = syntax.token_at_offset(position.offset).right_biased()?;
|
let token = syntax.token_at_offset(position.offset).right_biased()?;
|
||||||
|
let token_parent = token.parent()?;
|
||||||
let kind = token.kind();
|
let kind = token.kind();
|
||||||
if kind == T![;] {
|
if kind == T![;] {
|
||||||
ast::Struct::cast(token.parent())
|
ast::Struct::cast(token_parent)
|
||||||
.filter(|struct_| struct_.field_list().is_none())
|
.filter(|struct_| struct_.field_list().is_none())
|
||||||
.and_then(|struct_| struct_.name())
|
.and_then(|struct_| struct_.name())
|
||||||
} else if kind == T!['{'] {
|
} else if kind == T!['{'] {
|
||||||
match_ast! {
|
match_ast! {
|
||||||
match (token.parent()) {
|
match token_parent {
|
||||||
ast::RecordFieldList(rfl) => match_ast! {
|
ast::RecordFieldList(rfl) => match_ast! {
|
||||||
match (rfl.syntax().parent()?) {
|
match (rfl.syntax().parent()?) {
|
||||||
ast::Variant(it) => it.name(),
|
ast::Variant(it) => it.name(),
|
||||||
|
@ -169,7 +170,7 @@ fn get_name_of_item_declaration(syntax: &SyntaxNode, position: FilePosition) ->
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else if kind == T!['('] {
|
} else if kind == T!['('] {
|
||||||
let tfl = ast::TupleFieldList::cast(token.parent())?;
|
let tfl = ast::TupleFieldList::cast(token_parent)?;
|
||||||
match_ast! {
|
match_ast! {
|
||||||
match (tfl.syntax().parent()?) {
|
match (tfl.syntax().parent()?) {
|
||||||
ast::Variant(it) => it.name(),
|
ast::Variant(it) => it.name(),
|
||||||
|
|
|
@ -167,8 +167,7 @@ fn find_related_tests(
|
||||||
let functions = refs.iter().filter_map(|(range, _)| {
|
let functions = refs.iter().filter_map(|(range, _)| {
|
||||||
let token = file.token_at_offset(range.start()).next()?;
|
let token = file.token_at_offset(range.start()).next()?;
|
||||||
let token = sema.descend_into_macros(token);
|
let token = sema.descend_into_macros(token);
|
||||||
let syntax = token.parent();
|
token.ancestors().find_map(ast::Fn::cast)
|
||||||
syntax.ancestors().find_map(ast::Fn::cast)
|
|
||||||
});
|
});
|
||||||
|
|
||||||
for fn_def in functions {
|
for fn_def in functions {
|
||||||
|
|
|
@ -64,7 +64,7 @@ pub(crate) fn highlight(
|
||||||
Some(range) => {
|
Some(range) => {
|
||||||
let node = match source_file.syntax().covering_element(range) {
|
let node = match source_file.syntax().covering_element(range) {
|
||||||
NodeOrToken::Node(it) => it,
|
NodeOrToken::Node(it) => it,
|
||||||
NodeOrToken::Token(it) => it.parent(),
|
NodeOrToken::Token(it) => it.parent().unwrap(),
|
||||||
};
|
};
|
||||||
(node, range)
|
(node, range)
|
||||||
}
|
}
|
||||||
|
@ -167,16 +167,19 @@ fn traverse(
|
||||||
let element_to_highlight = if current_macro_call.is_some() && element.kind() != COMMENT {
|
let element_to_highlight = if current_macro_call.is_some() && element.kind() != COMMENT {
|
||||||
// Inside a macro -- expand it first
|
// Inside a macro -- expand it first
|
||||||
let token = match element.clone().into_token() {
|
let token = match element.clone().into_token() {
|
||||||
Some(it) if it.parent().kind() == TOKEN_TREE => it,
|
Some(it) if it.parent().map_or(false, |it| it.kind() == TOKEN_TREE) => it,
|
||||||
_ => continue,
|
_ => continue,
|
||||||
};
|
};
|
||||||
let token = sema.descend_into_macros(token.clone());
|
let token = sema.descend_into_macros(token.clone());
|
||||||
let parent = token.parent();
|
match token.parent() {
|
||||||
|
Some(parent) => {
|
||||||
// We only care Name and Name_ref
|
// We only care Name and Name_ref
|
||||||
match (token.kind(), parent.kind()) {
|
match (token.kind(), parent.kind()) {
|
||||||
(IDENT, NAME) | (IDENT, NAME_REF) => parent.into(),
|
(IDENT, NAME) | (IDENT, NAME_REF) => parent.into(),
|
||||||
_ => token.into(),
|
_ => token.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => token.into(),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
element.clone()
|
element.clone()
|
||||||
|
|
|
@ -28,7 +28,7 @@ pub(super) fn highlight_format_string(
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_format_string(string: &ast::String) -> Option<()> {
|
fn is_format_string(string: &ast::String) -> Option<()> {
|
||||||
let parent = string.syntax().parent();
|
let parent = string.syntax().parent()?;
|
||||||
|
|
||||||
let name = parent.parent().and_then(ast::MacroCall::cast)?.path()?.segment()?.name_ref()?;
|
let name = parent.parent().and_then(ast::MacroCall::cast)?.path()?.segment()?.name_ref()?;
|
||||||
if !matches!(name.text(), "format_args" | "format_args_nl") {
|
if !matches!(name.text(), "format_args" | "format_args_nl") {
|
||||||
|
|
|
@ -27,7 +27,7 @@ pub(crate) fn syntax_tree(
|
||||||
if let Some(tree) = syntax_tree_for_string(&token, text_range) {
|
if let Some(tree) = syntax_tree_for_string(&token, text_range) {
|
||||||
return tree;
|
return tree;
|
||||||
}
|
}
|
||||||
token.parent()
|
token.parent().unwrap()
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -108,7 +108,7 @@ fn on_dot_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
|
||||||
};
|
};
|
||||||
let current_indent_len = TextSize::of(current_indent);
|
let current_indent_len = TextSize::of(current_indent);
|
||||||
|
|
||||||
let parent = whitespace.syntax().parent();
|
let parent = whitespace.syntax().parent()?;
|
||||||
// Make sure dot is a part of call chain
|
// Make sure dot is a part of call chain
|
||||||
if !matches!(parent.kind(), FIELD_EXPR | METHOD_CALL_EXPR) {
|
if !matches!(parent.kind(), FIELD_EXPR | METHOD_CALL_EXPR) {
|
||||||
return None;
|
return None;
|
||||||
|
|
|
@ -38,7 +38,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<(
|
||||||
cov_mark::hit!(add_turbo_fish_one_fish_is_enough);
|
cov_mark::hit!(add_turbo_fish_one_fish_is_enough);
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
let name_ref = ast::NameRef::cast(ident.parent())?;
|
let name_ref = ast::NameRef::cast(ident.parent()?)?;
|
||||||
let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
|
let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
|
||||||
NameRefClass::Definition(def) => def,
|
NameRefClass::Definition(def) => def,
|
||||||
NameRefClass::ExternCrate(_) | NameRefClass::FieldShorthand { .. } => return None,
|
NameRefClass::ExternCrate(_) | NameRefClass::FieldShorthand { .. } => return None,
|
||||||
|
|
|
@ -41,7 +41,7 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||||
});
|
});
|
||||||
|
|
||||||
let (offset, target) = if let Some(keyword) = item_keyword {
|
let (offset, target) = if let Some(keyword) = item_keyword {
|
||||||
let parent = keyword.parent();
|
let parent = keyword.parent()?;
|
||||||
let def_kws = vec![CONST, STATIC, TYPE_ALIAS, FN, MODULE, STRUCT, ENUM, TRAIT];
|
let def_kws = vec![CONST, STATIC, TYPE_ALIAS, FN, MODULE, STRUCT, ENUM, TRAIT];
|
||||||
// Parent is not a definition, can't add visibility
|
// Parent is not a definition, can't add visibility
|
||||||
if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) {
|
if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) {
|
||||||
|
|
|
@ -48,7 +48,7 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Opti
|
||||||
_ => return None,
|
_ => return None,
|
||||||
};
|
};
|
||||||
|
|
||||||
let current_scope = ctx.sema.scope(&star.parent());
|
let current_scope = ctx.sema.scope(&star.parent()?);
|
||||||
let current_module = current_scope.module()?;
|
let current_module = current_scope.module()?;
|
||||||
|
|
||||||
let refs_in_target = find_refs_in_mod(ctx, target_module, Some(current_module))?;
|
let refs_in_target = find_refs_in_mod(ctx, target_module, Some(current_module))?;
|
||||||
|
|
|
@ -16,7 +16,6 @@ use syntax::{
|
||||||
edit::{AstNodeEdit, IndentLevel},
|
edit::{AstNodeEdit, IndentLevel},
|
||||||
AstNode,
|
AstNode,
|
||||||
},
|
},
|
||||||
SyntaxElement,
|
|
||||||
SyntaxKind::{self, BLOCK_EXPR, BREAK_EXPR, COMMENT, PATH_EXPR, RETURN_EXPR},
|
SyntaxKind::{self, BLOCK_EXPR, BREAK_EXPR, COMMENT, PATH_EXPR, RETURN_EXPR},
|
||||||
SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T,
|
SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T,
|
||||||
};
|
};
|
||||||
|
@ -62,7 +61,10 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext) -> Option
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
let node = element_to_node(node);
|
let node = match node {
|
||||||
|
syntax::NodeOrToken::Node(n) => n,
|
||||||
|
syntax::NodeOrToken::Token(t) => t.parent()?,
|
||||||
|
};
|
||||||
|
|
||||||
let body = extraction_target(&node, ctx.frange.range)?;
|
let body = extraction_target(&node, ctx.frange.range)?;
|
||||||
|
|
||||||
|
@ -560,14 +562,6 @@ impl HasTokenAtOffset for FunctionBody {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// node or token's parent
|
|
||||||
fn element_to_node(node: SyntaxElement) -> SyntaxNode {
|
|
||||||
match node {
|
|
||||||
syntax::NodeOrToken::Node(n) => n,
|
|
||||||
syntax::NodeOrToken::Token(t) => t.parent(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Try to guess what user wants to extract
|
/// Try to guess what user wants to extract
|
||||||
///
|
///
|
||||||
/// We have basically have two cases:
|
/// We have basically have two cases:
|
||||||
|
@ -1246,7 +1240,7 @@ fn make_body(
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
FlowHandler::If { .. } => {
|
FlowHandler::If { .. } => {
|
||||||
let lit_false = ast::Literal::cast(make::tokens::literal("false").parent()).unwrap();
|
let lit_false = make::expr_literal("false");
|
||||||
with_tail_expr(block, lit_false.into())
|
with_tail_expr(block, lit_false.into())
|
||||||
}
|
}
|
||||||
FlowHandler::IfOption { .. } => {
|
FlowHandler::IfOption { .. } => {
|
||||||
|
@ -1420,9 +1414,7 @@ fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) -> S
|
||||||
fn make_rewritten_flow(handler: &FlowHandler, arg_expr: Option<ast::Expr>) -> Option<ast::Expr> {
|
fn make_rewritten_flow(handler: &FlowHandler, arg_expr: Option<ast::Expr>) -> Option<ast::Expr> {
|
||||||
let value = match handler {
|
let value = match handler {
|
||||||
FlowHandler::None | FlowHandler::Try { .. } => return None,
|
FlowHandler::None | FlowHandler::Try { .. } => return None,
|
||||||
FlowHandler::If { .. } => {
|
FlowHandler::If { .. } => make::expr_literal("true").into(),
|
||||||
ast::Literal::cast(make::tokens::literal("true").parent()).unwrap().into()
|
|
||||||
}
|
|
||||||
FlowHandler::IfOption { .. } => {
|
FlowHandler::IfOption { .. } => {
|
||||||
let expr = arg_expr.unwrap_or_else(|| make::expr_tuple(Vec::new()));
|
let expr = arg_expr.unwrap_or_else(|| make::expr_tuple(Vec::new()));
|
||||||
let args = make::arg_list(iter::once(expr));
|
let args = make::arg_list(iter::once(expr));
|
||||||
|
|
|
@ -23,7 +23,7 @@ pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext) -> Option
|
||||||
let plus = ctx.find_token_syntax_at_offset(T![+])?;
|
let plus = ctx.find_token_syntax_at_offset(T![+])?;
|
||||||
|
|
||||||
// Make sure we're in a `TypeBoundList`
|
// Make sure we're in a `TypeBoundList`
|
||||||
if ast::TypeBoundList::cast(plus.parent()).is_none() {
|
if ast::TypeBoundList::cast(plus.parent()?).is_none() {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -30,7 +30,7 @@ use crate::{
|
||||||
|
|
||||||
pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||||
let if_keyword = ctx.find_token_syntax_at_offset(T![if])?;
|
let if_keyword = ctx.find_token_syntax_at_offset(T![if])?;
|
||||||
let expr = ast::IfExpr::cast(if_keyword.parent())?;
|
let expr = ast::IfExpr::cast(if_keyword.parent()?)?;
|
||||||
let if_range = if_keyword.text_range();
|
let if_range = if_keyword.text_range();
|
||||||
let cursor_in_range = if_range.contains_range(ctx.frange.range);
|
let cursor_in_range = if_range.contains_range(ctx.frange.range);
|
||||||
if !cursor_in_range {
|
if !cursor_in_range {
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
use syntax::{
|
use syntax::{
|
||||||
ast::{self, edit::AstNodeEdit, make, AstNode, NameOwner, TypeBoundsOwner},
|
ast::{self, edit_in_place::GenericParamsOwnerEdit, make, AstNode, NameOwner, TypeBoundsOwner},
|
||||||
match_ast,
|
match_ast,
|
||||||
SyntaxKind::*,
|
|
||||||
T,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||||
|
@ -23,7 +21,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||||
// }
|
// }
|
||||||
// ```
|
// ```
|
||||||
pub(crate) fn move_bounds_to_where_clause(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
pub(crate) fn move_bounds_to_where_clause(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||||
let type_param_list = ctx.find_node_at_offset::<ast::GenericParamList>()?;
|
let type_param_list = ctx.find_node_at_offset::<ast::GenericParamList>()?.clone_for_update();
|
||||||
|
|
||||||
let mut type_params = type_param_list.type_params();
|
let mut type_params = type_param_list.type_params();
|
||||||
if type_params.all(|p| p.type_bound_list().is_none()) {
|
if type_params.all(|p| p.type_bound_list().is_none()) {
|
||||||
|
@ -31,23 +29,7 @@ pub(crate) fn move_bounds_to_where_clause(acc: &mut Assists, ctx: &AssistContext
|
||||||
}
|
}
|
||||||
|
|
||||||
let parent = type_param_list.syntax().parent()?;
|
let parent = type_param_list.syntax().parent()?;
|
||||||
if parent.children_with_tokens().any(|it| it.kind() == WHERE_CLAUSE) {
|
let original_parent_range = parent.text_range();
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
let anchor = match_ast! {
|
|
||||||
match parent {
|
|
||||||
ast::Fn(it) => it.body()?.syntax().clone().into(),
|
|
||||||
ast::Trait(it) => it.assoc_item_list()?.syntax().clone().into(),
|
|
||||||
ast::Impl(it) => it.assoc_item_list()?.syntax().clone().into(),
|
|
||||||
ast::Enum(it) => it.variant_list()?.syntax().clone().into(),
|
|
||||||
ast::Struct(it) => {
|
|
||||||
it.syntax().children_with_tokens()
|
|
||||||
.find(|it| it.kind() == RECORD_FIELD_LIST || it.kind() == T![;])?
|
|
||||||
},
|
|
||||||
_ => return None
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let target = type_param_list.syntax().text_range();
|
let target = type_param_list.syntax().text_range();
|
||||||
acc.add(
|
acc.add(
|
||||||
|
@ -55,29 +37,27 @@ pub(crate) fn move_bounds_to_where_clause(acc: &mut Assists, ctx: &AssistContext
|
||||||
"Move to where clause",
|
"Move to where clause",
|
||||||
target,
|
target,
|
||||||
|edit| {
|
|edit| {
|
||||||
let new_params = type_param_list
|
let where_clause: ast::WhereClause = match_ast! {
|
||||||
.type_params()
|
match parent {
|
||||||
.filter(|it| it.type_bound_list().is_some())
|
ast::Fn(it) => it.get_or_create_where_clause(),
|
||||||
.map(|type_param| {
|
// ast::Trait(it) => it.get_or_create_where_clause(),
|
||||||
let without_bounds = type_param.remove_bounds();
|
ast::Impl(it) => it.get_or_create_where_clause(),
|
||||||
(type_param, without_bounds)
|
// ast::Enum(it) => it.get_or_create_where_clause(),
|
||||||
});
|
ast::Struct(it) => it.get_or_create_where_clause(),
|
||||||
|
_ => return,
|
||||||
let new_type_param_list = type_param_list.replace_descendants(new_params);
|
|
||||||
edit.replace_ast(type_param_list.clone(), new_type_param_list);
|
|
||||||
|
|
||||||
let where_clause = {
|
|
||||||
let predicates = type_param_list.type_params().filter_map(build_predicate);
|
|
||||||
make::where_clause(predicates)
|
|
||||||
};
|
|
||||||
|
|
||||||
let to_insert = match anchor.prev_sibling_or_token() {
|
|
||||||
Some(ref elem) if elem.kind() == WHITESPACE => {
|
|
||||||
format!("{} ", where_clause.syntax())
|
|
||||||
}
|
}
|
||||||
_ => format!(" {}", where_clause.syntax()),
|
|
||||||
};
|
};
|
||||||
edit.insert(anchor.text_range().start(), to_insert);
|
|
||||||
|
for type_param in type_param_list.type_params() {
|
||||||
|
if let Some(tbl) = type_param.type_bound_list() {
|
||||||
|
if let Some(predicate) = build_predicate(type_param.clone()) {
|
||||||
|
where_clause.add_predicate(predicate.clone_for_update())
|
||||||
|
}
|
||||||
|
tbl.remove()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
edit.replace(original_parent_range, parent.to_string())
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
@ -17,7 +17,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||||
// ```
|
// ```
|
||||||
pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
|
||||||
let colon_colon = ctx.find_token_syntax_at_offset(T![::])?;
|
let colon_colon = ctx.find_token_syntax_at_offset(T![::])?;
|
||||||
let path = ast::Path::cast(colon_colon.parent())?.qualifier()?;
|
let path = ast::Path::cast(colon_colon.parent()?)?.qualifier()?;
|
||||||
let top_path = successors(Some(path.clone()), |it| it.parent_path()).last()?;
|
let top_path = successors(Some(path.clone()), |it| it.parent_path()).last()?;
|
||||||
|
|
||||||
let use_tree = top_path.syntax().ancestors().find_map(ast::UseTree::cast)?;
|
let use_tree = top_path.syntax().ancestors().find_map(ast::UseTree::cast)?;
|
||||||
|
|
|
@ -30,7 +30,7 @@ pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
|
||||||
let assist_label = "Unwrap block";
|
let assist_label = "Unwrap block";
|
||||||
|
|
||||||
let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?;
|
let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?;
|
||||||
let mut block = ast::BlockExpr::cast(l_curly_token.parent())?;
|
let mut block = ast::BlockExpr::cast(l_curly_token.parent()?)?;
|
||||||
let target = block.syntax().text_range();
|
let target = block.syntax().text_range();
|
||||||
let mut parent = block.syntax().parent()?;
|
let mut parent = block.syntax().parent()?;
|
||||||
if ast::MatchArm::can_cast(parent.kind()) {
|
if ast::MatchArm::can_cast(parent.kind()) {
|
||||||
|
|
|
@ -33,7 +33,7 @@ pub(crate) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
for node in ctx.token.parent().ancestors() {
|
for node in ctx.token.ancestors() {
|
||||||
match_ast! {
|
match_ast! {
|
||||||
match node {
|
match node {
|
||||||
ast::SourceFile(it) => it.items().filter_map(|item| match item {
|
ast::SourceFile(it) => it.items().filter_map(|item| match item {
|
||||||
|
|
|
@ -82,13 +82,14 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
|
||||||
|
|
||||||
fn completion_match(ctx: &CompletionContext) -> Option<(ImplCompletionKind, SyntaxNode, Impl)> {
|
fn completion_match(ctx: &CompletionContext) -> Option<(ImplCompletionKind, SyntaxNode, Impl)> {
|
||||||
let mut token = ctx.token.clone();
|
let mut token = ctx.token.clone();
|
||||||
// For keywork without name like `impl .. { fn $0 }`, the current position is inside
|
// For keyword without name like `impl .. { fn $0 }`, the current position is inside
|
||||||
// the whitespace token, which is outside `FN` syntax node.
|
// the whitespace token, which is outside `FN` syntax node.
|
||||||
// We need to follow the previous token in this case.
|
// We need to follow the previous token in this case.
|
||||||
if token.kind() == SyntaxKind::WHITESPACE {
|
if token.kind() == SyntaxKind::WHITESPACE {
|
||||||
token = token.prev_token()?;
|
token = token.prev_token()?;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let parent_kind = token.parent().map_or(SyntaxKind::EOF, |it| it.kind());
|
||||||
let impl_item_offset = match token.kind() {
|
let impl_item_offset = match token.kind() {
|
||||||
// `impl .. { const $0 }`
|
// `impl .. { const $0 }`
|
||||||
// ERROR 0
|
// ERROR 0
|
||||||
|
@ -102,14 +103,14 @@ fn completion_match(ctx: &CompletionContext) -> Option<(ImplCompletionKind, Synt
|
||||||
// FN/TYPE_ALIAS/CONST 1
|
// FN/TYPE_ALIAS/CONST 1
|
||||||
// NAME 0
|
// NAME 0
|
||||||
// IDENT <- *
|
// IDENT <- *
|
||||||
SyntaxKind::IDENT if token.parent().kind() == SyntaxKind::NAME => 1,
|
SyntaxKind::IDENT if parent_kind == SyntaxKind::NAME => 1,
|
||||||
// `impl .. { foo$0 }`
|
// `impl .. { foo$0 }`
|
||||||
// MACRO_CALL 3
|
// MACRO_CALL 3
|
||||||
// PATH 2
|
// PATH 2
|
||||||
// PATH_SEGMENT 1
|
// PATH_SEGMENT 1
|
||||||
// NAME_REF 0
|
// NAME_REF 0
|
||||||
// IDENT <- *
|
// IDENT <- *
|
||||||
SyntaxKind::IDENT if token.parent().kind() == SyntaxKind::NAME_REF => 3,
|
SyntaxKind::IDENT if parent_kind == SyntaxKind::NAME_REF => 3,
|
||||||
_ => return None,
|
_ => return None,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -120,7 +120,7 @@ impl<'a> CompletionContext<'a> {
|
||||||
let original_token =
|
let original_token =
|
||||||
original_file.syntax().token_at_offset(position.offset).left_biased()?;
|
original_file.syntax().token_at_offset(position.offset).left_biased()?;
|
||||||
let token = sema.descend_into_macros(original_token.clone());
|
let token = sema.descend_into_macros(original_token.clone());
|
||||||
let scope = sema.scope_at_offset(&token.parent(), position.offset);
|
let scope = sema.scope_at_offset(&token, position.offset);
|
||||||
let mut locals = vec![];
|
let mut locals = vec![];
|
||||||
scope.process_all_names(&mut |name, scope| {
|
scope.process_all_names(&mut |name, scope| {
|
||||||
if let ScopeDef::Local(local) = scope {
|
if let ScopeDef::Local(local) = scope {
|
||||||
|
@ -281,7 +281,7 @@ impl<'a> CompletionContext<'a> {
|
||||||
fn fill_impl_def(&mut self) {
|
fn fill_impl_def(&mut self) {
|
||||||
self.impl_def = self
|
self.impl_def = self
|
||||||
.sema
|
.sema
|
||||||
.ancestors_with_macros(self.token.parent())
|
.token_ancestors_with_macros(self.token.clone())
|
||||||
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
|
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
|
||||||
.find_map(ast::Impl::cast);
|
.find_map(ast::Impl::cast);
|
||||||
}
|
}
|
||||||
|
@ -293,7 +293,10 @@ impl<'a> CompletionContext<'a> {
|
||||||
offset: TextSize,
|
offset: TextSize,
|
||||||
) {
|
) {
|
||||||
let expected = {
|
let expected = {
|
||||||
let mut node = self.token.parent();
|
let mut node = match self.token.parent() {
|
||||||
|
Some(it) => it,
|
||||||
|
None => return,
|
||||||
|
};
|
||||||
loop {
|
loop {
|
||||||
let ret = match_ast! {
|
let ret = match_ast! {
|
||||||
match node {
|
match node {
|
||||||
|
@ -474,17 +477,17 @@ impl<'a> CompletionContext<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
self.use_item_syntax =
|
self.use_item_syntax =
|
||||||
self.sema.ancestors_with_macros(self.token.parent()).find_map(ast::Use::cast);
|
self.sema.token_ancestors_with_macros(self.token.clone()).find_map(ast::Use::cast);
|
||||||
|
|
||||||
self.function_syntax = self
|
self.function_syntax = self
|
||||||
.sema
|
.sema
|
||||||
.ancestors_with_macros(self.token.parent())
|
.token_ancestors_with_macros(self.token.clone())
|
||||||
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
|
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
|
||||||
.find_map(ast::Fn::cast);
|
.find_map(ast::Fn::cast);
|
||||||
|
|
||||||
self.record_field_syntax = self
|
self.record_field_syntax = self
|
||||||
.sema
|
.sema
|
||||||
.ancestors_with_macros(self.token.parent())
|
.token_ancestors_with_macros(self.token.clone())
|
||||||
.take_while(|it| {
|
.take_while(|it| {
|
||||||
it.kind() != SOURCE_FILE && it.kind() != MODULE && it.kind() != CALL_EXPR
|
it.kind() != SOURCE_FILE && it.kind() != MODULE && it.kind() != CALL_EXPR
|
||||||
})
|
})
|
||||||
|
|
|
@ -184,11 +184,7 @@ fn test_has_impl_as_prev_sibling() {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn is_in_loop_body(element: SyntaxElement) -> bool {
|
pub(crate) fn is_in_loop_body(element: SyntaxElement) -> bool {
|
||||||
let leaf = match element {
|
for node in element.ancestors() {
|
||||||
NodeOrToken::Node(node) => node,
|
|
||||||
NodeOrToken::Token(token) => token.parent(),
|
|
||||||
};
|
|
||||||
for node in leaf.ancestors() {
|
|
||||||
if node.kind() == FN || node.kind() == CLOSURE_EXPR {
|
if node.kind() == FN || node.kind() == CLOSURE_EXPR {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -201,7 +197,7 @@ pub(crate) fn is_in_loop_body(element: SyntaxElement) -> bool {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
if let Some(body) = loop_body {
|
if let Some(body) = loop_body {
|
||||||
if body.syntax().text_range().contains_range(leaf.text_range()) {
|
if body.syntax().text_range().contains_range(element.text_range()) {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -235,12 +231,8 @@ fn previous_sibling_or_ancestor_sibling(element: SyntaxElement) -> Option<Syntax
|
||||||
Some(sibling)
|
Some(sibling)
|
||||||
} else {
|
} else {
|
||||||
// if not trying to find first ancestor which has such a sibling
|
// if not trying to find first ancestor which has such a sibling
|
||||||
let node = match element {
|
let range = element.text_range();
|
||||||
NodeOrToken::Node(node) => node,
|
let top_node = element.ancestors().take_while(|it| it.text_range() == range).last()?;
|
||||||
NodeOrToken::Token(token) => token.parent(),
|
|
||||||
};
|
|
||||||
let range = node.text_range();
|
|
||||||
let top_node = node.ancestors().take_while(|it| it.text_range() == range).last()?;
|
|
||||||
let prev_sibling_node = top_node.ancestors().find(|it| {
|
let prev_sibling_node = top_node.ancestors().find(|it| {
|
||||||
non_trivia_sibling(NodeOrToken::Node(it.to_owned()), Direction::Prev).is_some()
|
non_trivia_sibling(NodeOrToken::Node(it.to_owned()), Direction::Prev).is_some()
|
||||||
})?;
|
})?;
|
||||||
|
|
|
@ -109,7 +109,7 @@ fn call_info_impl(
|
||||||
token: SyntaxToken,
|
token: SyntaxToken,
|
||||||
) -> Option<(hir::Callable, Option<usize>)> {
|
) -> Option<(hir::Callable, Option<usize>)> {
|
||||||
// Find the calling expression and it's NameRef
|
// Find the calling expression and it's NameRef
|
||||||
let calling_node = FnCallNode::with_node(&token.parent())?;
|
let calling_node = FnCallNode::with_node(&token.parent()?)?;
|
||||||
|
|
||||||
let callable = match &calling_node {
|
let callable = match &calling_node {
|
||||||
FnCallNode::CallExpr(call) => sema.type_of_expr(&call.expr()?)?.as_callable(sema.db)?,
|
FnCallNode::CallExpr(call) => sema.type_of_expr(&call.expr()?)?.as_callable(sema.db)?,
|
||||||
|
|
|
@ -195,7 +195,7 @@ impl<'db> ResolutionScope<'db> {
|
||||||
.syntax()
|
.syntax()
|
||||||
.token_at_offset(resolve_context.offset)
|
.token_at_offset(resolve_context.offset)
|
||||||
.left_biased()
|
.left_biased()
|
||||||
.map(|token| token.parent())
|
.and_then(|token| token.parent())
|
||||||
.unwrap_or_else(|| file.syntax().clone());
|
.unwrap_or_else(|| file.syntax().clone());
|
||||||
let node = pick_node_for_resolution(node);
|
let node = pick_node_for_resolution(node);
|
||||||
let scope = sema.scope(&node);
|
let scope = sema.scope(&node);
|
||||||
|
|
|
@ -13,7 +13,7 @@ doctest = false
|
||||||
[dependencies]
|
[dependencies]
|
||||||
cov-mark = { version = "1.1", features = ["thread-local"] }
|
cov-mark = { version = "1.1", features = ["thread-local"] }
|
||||||
itertools = "0.10.0"
|
itertools = "0.10.0"
|
||||||
rowan = "0.12.2"
|
rowan = "0.13.0-pre.2"
|
||||||
rustc_lexer = { version = "710.0.0", package = "rustc-ap-rustc_lexer" }
|
rustc_lexer = { version = "710.0.0", package = "rustc-ap-rustc_lexer" }
|
||||||
rustc-hash = "1.1.0"
|
rustc-hash = "1.1.0"
|
||||||
arrayvec = "0.5.1"
|
arrayvec = "0.5.1"
|
||||||
|
|
|
@ -4,7 +4,6 @@ use std::{
|
||||||
fmt,
|
fmt,
|
||||||
hash::BuildHasherDefault,
|
hash::BuildHasherDefault,
|
||||||
ops::{self, RangeInclusive},
|
ops::{self, RangeInclusive},
|
||||||
ptr,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
use indexmap::IndexMap;
|
use indexmap::IndexMap;
|
||||||
|
@ -27,7 +26,7 @@ pub fn ancestors_at_offset(
|
||||||
offset: TextSize,
|
offset: TextSize,
|
||||||
) -> impl Iterator<Item = SyntaxNode> {
|
) -> impl Iterator<Item = SyntaxNode> {
|
||||||
node.token_at_offset(offset)
|
node.token_at_offset(offset)
|
||||||
.map(|token| token.parent().ancestors())
|
.map(|token| token.ancestors())
|
||||||
.kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
|
.kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -171,7 +170,7 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
|
||||||
&& lhs.text_range().len() == rhs.text_range().len()
|
&& lhs.text_range().len() == rhs.text_range().len()
|
||||||
&& match (&lhs, &rhs) {
|
&& match (&lhs, &rhs) {
|
||||||
(NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
|
(NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
|
||||||
ptr::eq(lhs.green(), rhs.green()) || lhs.text() == rhs.text()
|
lhs == rhs || lhs.text() == rhs.text()
|
||||||
}
|
}
|
||||||
(NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
|
(NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
|
||||||
_ => false,
|
_ => false,
|
||||||
|
@ -280,9 +279,10 @@ fn _insert_children(
|
||||||
to_green_element(element)
|
to_green_element(element)
|
||||||
});
|
});
|
||||||
|
|
||||||
let mut old_children = parent.green().children().map(|it| match it {
|
let parent_green = parent.green();
|
||||||
NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()),
|
let mut old_children = parent_green.children().map(|it| match it {
|
||||||
NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()),
|
NodeOrToken::Token(it) => NodeOrToken::Token(it.to_owned()),
|
||||||
|
NodeOrToken::Node(it) => NodeOrToken::Node(it.to_owned()),
|
||||||
});
|
});
|
||||||
|
|
||||||
let new_children = match &position {
|
let new_children = match &position {
|
||||||
|
@ -319,9 +319,10 @@ fn _replace_children(
|
||||||
) -> SyntaxNode {
|
) -> SyntaxNode {
|
||||||
let start = position_of_child(parent, to_delete.start().clone());
|
let start = position_of_child(parent, to_delete.start().clone());
|
||||||
let end = position_of_child(parent, to_delete.end().clone());
|
let end = position_of_child(parent, to_delete.end().clone());
|
||||||
let mut old_children = parent.green().children().map(|it| match it {
|
let parent_green = parent.green();
|
||||||
NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()),
|
let mut old_children = parent_green.children().map(|it| match it {
|
||||||
NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()),
|
NodeOrToken::Token(it) => NodeOrToken::Token(it.to_owned()),
|
||||||
|
NodeOrToken::Node(it) => NodeOrToken::Node(it.to_owned()),
|
||||||
});
|
});
|
||||||
|
|
||||||
let before = old_children.by_ref().take(start).collect::<Vec<_>>();
|
let before = old_children.by_ref().take(start).collect::<Vec<_>>();
|
||||||
|
@ -487,9 +488,9 @@ impl<'a> SyntaxRewriter<'a> {
|
||||||
/// Returns `None` when there are no replacements.
|
/// Returns `None` when there are no replacements.
|
||||||
pub fn rewrite_root(&self) -> Option<SyntaxNode> {
|
pub fn rewrite_root(&self) -> Option<SyntaxNode> {
|
||||||
let _p = profile::span("rewrite_root");
|
let _p = profile::span("rewrite_root");
|
||||||
fn element_to_node_or_parent(element: &SyntaxElement) -> SyntaxNode {
|
fn element_to_node_or_parent(element: &SyntaxElement) -> Option<SyntaxNode> {
|
||||||
match element {
|
match element {
|
||||||
SyntaxElement::Node(it) => it.clone(),
|
SyntaxElement::Node(it) => Some(it.clone()),
|
||||||
SyntaxElement::Token(it) => it.parent(),
|
SyntaxElement::Token(it) => it.parent(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -497,9 +498,9 @@ impl<'a> SyntaxRewriter<'a> {
|
||||||
assert!(self.f.is_none());
|
assert!(self.f.is_none());
|
||||||
self.replacements
|
self.replacements
|
||||||
.keys()
|
.keys()
|
||||||
.map(element_to_node_or_parent)
|
.filter_map(element_to_node_or_parent)
|
||||||
.chain(self.insertions.keys().map(|pos| match pos {
|
.chain(self.insertions.keys().filter_map(|pos| match pos {
|
||||||
InsertPos::FirstChildOf(it) => it.clone(),
|
InsertPos::FirstChildOf(it) => Some(it.clone()),
|
||||||
InsertPos::After(it) => element_to_node_or_parent(it),
|
InsertPos::After(it) => element_to_node_or_parent(it),
|
||||||
}))
|
}))
|
||||||
// If we only have one replacement/insertion, we must return its parent node, since `rewrite` does
|
// If we only have one replacement/insertion, we must return its parent node, since `rewrite` does
|
||||||
|
@ -552,7 +553,7 @@ impl<'a> SyntaxRewriter<'a> {
|
||||||
};
|
};
|
||||||
} else {
|
} else {
|
||||||
match element {
|
match element {
|
||||||
NodeOrToken::Token(it) => acc.push(NodeOrToken::Token(it.green().clone())),
|
NodeOrToken::Token(it) => acc.push(NodeOrToken::Token(it.green().to_owned())),
|
||||||
NodeOrToken::Node(it) => {
|
NodeOrToken::Node(it) => {
|
||||||
acc.push(NodeOrToken::Node(self.rewrite_children(it)));
|
acc.push(NodeOrToken::Node(self.rewrite_children(it)));
|
||||||
}
|
}
|
||||||
|
@ -567,7 +568,7 @@ impl<'a> SyntaxRewriter<'a> {
|
||||||
fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
|
fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
|
||||||
match element {
|
match element {
|
||||||
NodeOrToken::Node(it) => NodeOrToken::Node(it.green().to_owned()),
|
NodeOrToken::Node(it) => NodeOrToken::Node(it.green().to_owned()),
|
||||||
NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
|
NodeOrToken::Token(it) => NodeOrToken::Token(it.green().to_owned()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -625,7 +626,7 @@ fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
|
||||||
fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
|
fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
|
||||||
match element {
|
match element {
|
||||||
NodeOrToken::Node(it) => it.green().to_owned().into(),
|
NodeOrToken::Node(it) => it.green().to_owned().into(),
|
||||||
NodeOrToken::Token(it) => it.green().clone().into(),
|
NodeOrToken::Token(it) => it.green().to_owned().into(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -6,6 +6,7 @@ mod token_ext;
|
||||||
mod node_ext;
|
mod node_ext;
|
||||||
mod expr_ext;
|
mod expr_ext;
|
||||||
pub mod edit;
|
pub mod edit;
|
||||||
|
pub mod edit_in_place;
|
||||||
pub mod make;
|
pub mod make;
|
||||||
|
|
||||||
use std::marker::PhantomData;
|
use std::marker::PhantomData;
|
||||||
|
@ -40,6 +41,12 @@ pub trait AstNode {
|
||||||
Self: Sized;
|
Self: Sized;
|
||||||
|
|
||||||
fn syntax(&self) -> &SyntaxNode;
|
fn syntax(&self) -> &SyntaxNode;
|
||||||
|
fn clone_for_update(&self) -> Self
|
||||||
|
where
|
||||||
|
Self: Sized,
|
||||||
|
{
|
||||||
|
Self::cast(self.syntax().clone_for_update()).unwrap()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Like `AstNode`, but wraps tokens rather than interior nodes.
|
/// Like `AstNode`, but wraps tokens rather than interior nodes.
|
||||||
|
|
105
crates/syntax/src/ast/edit_in_place.rs
Normal file
105
crates/syntax/src/ast/edit_in_place.rs
Normal file
|
@ -0,0 +1,105 @@
|
||||||
|
//! Structural editing for ast.
|
||||||
|
|
||||||
|
use std::iter::empty;
|
||||||
|
|
||||||
|
use ast::{edit::AstNodeEdit, make, GenericParamsOwner, WhereClause};
|
||||||
|
use parser::T;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
ast,
|
||||||
|
ted::{self, Position},
|
||||||
|
AstNode, Direction, SyntaxKind,
|
||||||
|
};
|
||||||
|
|
||||||
|
use super::NameOwner;
|
||||||
|
|
||||||
|
pub trait GenericParamsOwnerEdit: ast::GenericParamsOwner + AstNodeEdit {
|
||||||
|
fn get_or_create_where_clause(&self) -> ast::WhereClause;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GenericParamsOwnerEdit for ast::Fn {
|
||||||
|
fn get_or_create_where_clause(&self) -> WhereClause {
|
||||||
|
if self.where_clause().is_none() {
|
||||||
|
let position = if let Some(ty) = self.ret_type() {
|
||||||
|
Position::after(ty.syntax().clone())
|
||||||
|
} else if let Some(param_list) = self.param_list() {
|
||||||
|
Position::after(param_list.syntax().clone())
|
||||||
|
} else {
|
||||||
|
Position::last_child_of(self.syntax().clone())
|
||||||
|
};
|
||||||
|
create_where_clause(position)
|
||||||
|
}
|
||||||
|
self.where_clause().unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GenericParamsOwnerEdit for ast::Impl {
|
||||||
|
fn get_or_create_where_clause(&self) -> WhereClause {
|
||||||
|
if self.where_clause().is_none() {
|
||||||
|
let position = if let Some(ty) = self.self_ty() {
|
||||||
|
Position::after(ty.syntax().clone())
|
||||||
|
} else {
|
||||||
|
Position::last_child_of(self.syntax().clone())
|
||||||
|
};
|
||||||
|
create_where_clause(position)
|
||||||
|
}
|
||||||
|
self.where_clause().unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl GenericParamsOwnerEdit for ast::Struct {
|
||||||
|
fn get_or_create_where_clause(&self) -> WhereClause {
|
||||||
|
if self.where_clause().is_none() {
|
||||||
|
let tfl = self.field_list().and_then(|fl| match fl {
|
||||||
|
ast::FieldList::RecordFieldList(_) => None,
|
||||||
|
ast::FieldList::TupleFieldList(it) => Some(it),
|
||||||
|
});
|
||||||
|
let position = if let Some(tfl) = tfl {
|
||||||
|
Position::after(tfl.syntax().clone())
|
||||||
|
} else if let Some(gpl) = self.generic_param_list() {
|
||||||
|
Position::after(gpl.syntax().clone())
|
||||||
|
} else if let Some(name) = self.name() {
|
||||||
|
Position::after(name.syntax().clone())
|
||||||
|
} else {
|
||||||
|
Position::last_child_of(self.syntax().clone())
|
||||||
|
};
|
||||||
|
create_where_clause(position)
|
||||||
|
}
|
||||||
|
self.where_clause().unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_where_clause(position: Position) {
|
||||||
|
let elements = vec![
|
||||||
|
make::tokens::single_space().into(),
|
||||||
|
make::where_clause(empty()).clone_for_update().syntax().clone().into(),
|
||||||
|
];
|
||||||
|
ted::insert_all(position, elements);
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ast::WhereClause {
|
||||||
|
pub fn add_predicate(&self, predicate: ast::WherePred) {
|
||||||
|
if let Some(pred) = self.predicates().last() {
|
||||||
|
if !pred.syntax().siblings_with_tokens(Direction::Next).any(|it| it.kind() == T![,]) {
|
||||||
|
ted::append_child(self.syntax().clone(), make::token(T![,]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if self.syntax().children_with_tokens().last().map(|it| it.kind())
|
||||||
|
!= Some(SyntaxKind::WHITESPACE)
|
||||||
|
{
|
||||||
|
ted::append_child(self.syntax().clone(), make::tokens::single_space());
|
||||||
|
}
|
||||||
|
ted::append_child(self.syntax().clone(), predicate.syntax().clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ast::TypeBoundList {
|
||||||
|
pub fn remove(&self) {
|
||||||
|
if let Some(colon) =
|
||||||
|
self.syntax().siblings_with_tokens(Direction::Prev).find(|it| it.kind() == T![:])
|
||||||
|
{
|
||||||
|
ted::remove_all(colon..=self.syntax().clone().into())
|
||||||
|
} else {
|
||||||
|
ted::remove(self.syntax().clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -174,6 +174,11 @@ pub fn block_expr(
|
||||||
pub fn expr_unit() -> ast::Expr {
|
pub fn expr_unit() -> ast::Expr {
|
||||||
expr_from_text("()")
|
expr_from_text("()")
|
||||||
}
|
}
|
||||||
|
pub fn expr_literal(text: &str) -> ast::Literal {
|
||||||
|
assert_eq!(text.trim(), text);
|
||||||
|
ast_from_text(&format!("fn f() {{ let _ = {}; }}", text))
|
||||||
|
}
|
||||||
|
|
||||||
pub fn expr_empty_block() -> ast::Expr {
|
pub fn expr_empty_block() -> ast::Expr {
|
||||||
expr_from_text("{}")
|
expr_from_text("{}")
|
||||||
}
|
}
|
||||||
|
@ -390,6 +395,7 @@ pub fn token(kind: SyntaxKind) -> SyntaxToken {
|
||||||
tokens::SOURCE_FILE
|
tokens::SOURCE_FILE
|
||||||
.tree()
|
.tree()
|
||||||
.syntax()
|
.syntax()
|
||||||
|
.clone_for_update()
|
||||||
.descendants_with_tokens()
|
.descendants_with_tokens()
|
||||||
.filter_map(|it| it.into_token())
|
.filter_map(|it| it.into_token())
|
||||||
.find(|it| it.kind() == kind)
|
.find(|it| it.kind() == kind)
|
||||||
|
@ -544,6 +550,7 @@ pub mod tokens {
|
||||||
SOURCE_FILE
|
SOURCE_FILE
|
||||||
.tree()
|
.tree()
|
||||||
.syntax()
|
.syntax()
|
||||||
|
.clone_for_update()
|
||||||
.descendants_with_tokens()
|
.descendants_with_tokens()
|
||||||
.filter_map(|it| it.into_token())
|
.filter_map(|it| it.into_token())
|
||||||
.find(|it| it.kind() == WHITESPACE && it.text() == " ")
|
.find(|it| it.kind() == WHITESPACE && it.text() == " ")
|
||||||
|
@ -569,13 +576,16 @@ pub mod tokens {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn single_newline() -> SyntaxToken {
|
pub fn single_newline() -> SyntaxToken {
|
||||||
SOURCE_FILE
|
let res = SOURCE_FILE
|
||||||
.tree()
|
.tree()
|
||||||
.syntax()
|
.syntax()
|
||||||
|
.clone_for_update()
|
||||||
.descendants_with_tokens()
|
.descendants_with_tokens()
|
||||||
.filter_map(|it| it.into_token())
|
.filter_map(|it| it.into_token())
|
||||||
.find(|it| it.kind() == WHITESPACE && it.text() == "\n")
|
.find(|it| it.kind() == WHITESPACE && it.text() == "\n")
|
||||||
.unwrap()
|
.unwrap();
|
||||||
|
res.detach();
|
||||||
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn blank_line() -> SyntaxToken {
|
pub fn blank_line() -> SyntaxToken {
|
||||||
|
|
|
@ -34,7 +34,9 @@ impl ast::NameRef {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn text_of_first_token(node: &SyntaxNode) -> &str {
|
fn text_of_first_token(node: &SyntaxNode) -> &str {
|
||||||
node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
|
let t =
|
||||||
|
node.green().children().next().and_then(|it| it.into_token()).unwrap().text().to_string();
|
||||||
|
Box::leak(Box::new(t))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum Macro {
|
pub enum Macro {
|
||||||
|
|
|
@ -38,6 +38,7 @@ pub mod ast;
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
pub mod fuzz;
|
pub mod fuzz;
|
||||||
pub mod utils;
|
pub mod utils;
|
||||||
|
pub mod ted;
|
||||||
|
|
||||||
use std::{marker::PhantomData, sync::Arc};
|
use std::{marker::PhantomData, sync::Arc};
|
||||||
|
|
||||||
|
|
|
@ -124,11 +124,7 @@ fn is_contextual_kw(text: &str) -> bool {
|
||||||
fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
|
fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
|
||||||
let node = node.covering_element(range);
|
let node = node.covering_element(range);
|
||||||
|
|
||||||
let mut ancestors = match node {
|
node.ancestors().find_map(|node| {
|
||||||
NodeOrToken::Token(it) => it.parent().ancestors(),
|
|
||||||
NodeOrToken::Node(it) => it.ancestors(),
|
|
||||||
};
|
|
||||||
ancestors.find_map(|node| {
|
|
||||||
let first_child = node.first_child_or_token().map(|it| it.kind());
|
let first_child = node.first_child_or_token().map(|it| it.kind());
|
||||||
let parent = node.parent().map(|it| it.kind());
|
let parent = node.parent().map(|it| it.kind());
|
||||||
Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r))
|
Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r))
|
||||||
|
|
78
crates/syntax/src/ted.rs
Normal file
78
crates/syntax/src/ted.rs
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
//! Primitive tree editor, ed for trees
|
||||||
|
#![allow(unused)]
|
||||||
|
use std::ops::RangeInclusive;
|
||||||
|
|
||||||
|
use crate::{SyntaxElement, SyntaxNode};
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct Position {
|
||||||
|
repr: PositionRepr,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
enum PositionRepr {
|
||||||
|
FirstChild(SyntaxNode),
|
||||||
|
After(SyntaxElement),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Position {
|
||||||
|
pub fn after(elem: impl Into<SyntaxElement>) -> Position {
|
||||||
|
let repr = PositionRepr::After(elem.into());
|
||||||
|
Position { repr }
|
||||||
|
}
|
||||||
|
pub fn before(elem: impl Into<SyntaxElement>) -> Position {
|
||||||
|
let elem = elem.into();
|
||||||
|
let repr = match elem.prev_sibling_or_token() {
|
||||||
|
Some(it) => PositionRepr::After(it),
|
||||||
|
None => PositionRepr::FirstChild(elem.parent().unwrap()),
|
||||||
|
};
|
||||||
|
Position { repr }
|
||||||
|
}
|
||||||
|
pub fn first_child_of(node: impl Into<SyntaxNode>) -> Position {
|
||||||
|
let repr = PositionRepr::FirstChild(node.into());
|
||||||
|
Position { repr }
|
||||||
|
}
|
||||||
|
pub fn last_child_of(node: impl Into<SyntaxNode>) -> Position {
|
||||||
|
let node = node.into();
|
||||||
|
let repr = match node.last_child_or_token() {
|
||||||
|
Some(it) => PositionRepr::After(it),
|
||||||
|
None => PositionRepr::FirstChild(node),
|
||||||
|
};
|
||||||
|
Position { repr }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn insert(position: Position, elem: impl Into<SyntaxElement>) {
|
||||||
|
insert_all(position, vec![elem.into()])
|
||||||
|
}
|
||||||
|
pub fn insert_all(position: Position, elements: Vec<SyntaxElement>) {
|
||||||
|
let (parent, index) = match position.repr {
|
||||||
|
PositionRepr::FirstChild(parent) => (parent, 0),
|
||||||
|
PositionRepr::After(child) => (child.parent().unwrap(), child.index() + 1),
|
||||||
|
};
|
||||||
|
parent.splice_children(index..index, elements);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn remove(elem: impl Into<SyntaxElement>) {
|
||||||
|
let elem = elem.into();
|
||||||
|
remove_all(elem.clone()..=elem)
|
||||||
|
}
|
||||||
|
pub fn remove_all(range: RangeInclusive<SyntaxElement>) {
|
||||||
|
replace_all(range, Vec::new())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn replace(old: impl Into<SyntaxElement>, new: impl Into<SyntaxElement>) {
|
||||||
|
let old = old.into();
|
||||||
|
replace_all(old.clone()..=old, vec![new.into()])
|
||||||
|
}
|
||||||
|
/// Replaces an inclusive range of sibling elements with `new` elements.
///
/// Both ends of `range` must share the same parent; panics if the start
/// element is detached.
pub fn replace_all(range: RangeInclusive<SyntaxElement>, new: Vec<SyntaxElement>) {
    let start = range.start().index();
    let end = range.end().index();
    let parent = range.start().parent().unwrap();
    // `splice_children` takes a half-open range, hence `end + 1`.
    parent.splice_children(start..end + 1, new)
}
|
||||||
|
|
||||||
|
pub fn append_child(node: impl Into<SyntaxNode>, child: impl Into<SyntaxElement>) {
|
||||||
|
let position = Position::last_child_of(node);
|
||||||
|
insert(position, child)
|
||||||
|
}
|
Loading…
Reference in a new issue