Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 13:33:31 +00:00
Merge #9901
9901: internal: remove dead code r=matklad a=matklad
bors r+
🤖
Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
Commit 182a2b8e72
3 changed files with 9 additions and 115 deletions
@@ -705,8 +705,8 @@ mod tests {
     use crate::tests::parse_macro;
     use parser::TokenSource;
     use syntax::{
-        algo::{insert_children, InsertPosition},
-        ast::AstNode,
+        ast::{make, AstNode},
+        ted,
     };
     use test_utils::assert_eq_text;
 
@@ -772,42 +772,26 @@ mod tests {
 
     #[test]
     fn test_token_tree_last_child_is_white_space() {
-        let source_file = ast::SourceFile::parse("f!({} );").ok().unwrap();
+        let source_file = ast::SourceFile::parse("f!{}").ok().unwrap();
         let macro_call = source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
         let token_tree = macro_call.token_tree().unwrap();
 
         // Token Tree now is :
         // TokenTree
-        // - T!['(']
         // - TokenTree
         //   - T!['{']
         //   - T!['}']
-        // - WHITE_SPACE
-        // - T![')']
 
-        let rbrace =
-            token_tree.syntax().descendants_with_tokens().find(|it| it.kind() == T!['}']).unwrap();
-        let space = token_tree
-            .syntax()
-            .descendants_with_tokens()
-            .find(|it| it.kind() == SyntaxKind::WHITESPACE)
-            .unwrap();
-
-        // reorder th white space, such that the white is inside the inner token-tree.
-        let token_tree = insert_children(
-            &rbrace.parent().unwrap(),
-            InsertPosition::Last,
-            std::iter::once(space),
-        );
-
+        let token_tree = token_tree.clone_for_update();
+        ted::append_child(token_tree.syntax(), make::tokens::single_space());
+        let token_tree = token_tree.clone_subtree();
         // Token Tree now is :
         // TokenTree
         // - T!['{']
         // - T!['}']
         // - WHITE_SPACE
-        let token_tree = ast::TokenTree::cast(token_tree).unwrap();
-        let tt = syntax_node_to_token_tree(token_tree.syntax()).0;
 
+        let tt = syntax_node_to_token_tree(token_tree.syntax()).0;
         assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
     }
 
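The rewritten test above shows the editing style that replaces the removed insert_children/InsertPosition helpers: clone the subtree for mutation, edit it in place with ted, then freeze it again. The sketch below restates that pattern outside the test; the wrapper function and its name are illustrative only, while the individual calls are taken from the added lines of the hunk.

    use syntax::{
        ast::{self, make, AstNode},
        ted,
    };

    /// Appends a trailing space to the token tree of `f!{}` using the mutable
    /// editing API, mirroring the rewritten test.
    fn append_space_to_token_tree() -> ast::TokenTree {
        let source_file = ast::SourceFile::parse("f!{}").ok().unwrap();
        let macro_call =
            source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
        let token_tree = macro_call.token_tree().unwrap();

        // `clone_for_update` yields a mutable copy of the subtree, `ted` edits it
        // in place, and `clone_subtree` freezes the result back into an immutable tree.
        let token_tree = token_tree.clone_for_update();
        ted::append_child(token_tree.syntax(), make::tokens::single_space());
        token_tree.clone_subtree()
    }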
@@ -8,8 +8,8 @@ use rustc_hash::FxHashMap;
 use text_edit::TextEditBuilder;
 
 use crate::{
-    AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr,
-    SyntaxToken, TextRange, TextSize,
+    AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
+    TextSize,
 };
 
 /// Returns ancestors of the node at the offset, sorted by length. This should
@@ -92,14 +92,6 @@ pub fn has_errors(node: &SyntaxNode) -> bool {
     node.children().any(|it| it.kind() == SyntaxKind::ERROR)
 }
 
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum InsertPosition<T> {
-    First,
-    Last,
-    Before(T),
-    After(T),
-}
-
 type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
 
 #[derive(Debug, Hash, PartialEq, Eq)]
@@ -250,87 +242,6 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
     }
 }
 
-/// Adds specified children (tokens or nodes) to the current node at the
-/// specific position.
-///
-/// This is a type-unsafe low-level editing API, if you need to use it,
-/// prefer to create a type-safe abstraction on top of it instead.
-pub fn insert_children(
-    parent: &SyntaxNode,
-    position: InsertPosition<SyntaxElement>,
-    to_insert: impl IntoIterator<Item = SyntaxElement>,
-) -> SyntaxNode {
-    let mut to_insert = to_insert.into_iter();
-    _insert_children(parent, position, &mut to_insert)
-}
-
-fn _insert_children(
-    parent: &SyntaxNode,
-    position: InsertPosition<SyntaxElement>,
-    to_insert: &mut dyn Iterator<Item = SyntaxElement>,
-) -> SyntaxNode {
-    let mut delta = TextSize::default();
-    let to_insert = to_insert.map(|element| {
-        delta += element.text_range().len();
-        to_green_element(element)
-    });
-
-    let parent_green = parent.green();
-    let mut old_children = parent_green.children().map(|it| match it {
-        NodeOrToken::Token(it) => NodeOrToken::Token(it.to_owned()),
-        NodeOrToken::Node(it) => NodeOrToken::Node(it.to_owned()),
-    });
-
-    let new_children = match &position {
-        InsertPosition::First => to_insert.chain(old_children).collect::<Vec<_>>(),
-        InsertPosition::Last => old_children.chain(to_insert).collect::<Vec<_>>(),
-        InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
-            let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
-            let split_at = position_of_child(parent, anchor.clone()) + take_anchor;
-            let before = old_children.by_ref().take(split_at).collect::<Vec<_>>();
-            before.into_iter().chain(to_insert).chain(old_children).collect::<Vec<_>>()
-        }
-    };
-
-    with_children(parent, new_children)
-}
-
-fn with_children(
-    parent: &SyntaxNode,
-    new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>,
-) -> SyntaxNode {
-    let _p = profile::span("with_children");
-
-    let new_green = rowan::GreenNode::new(rowan::SyntaxKind(parent.kind() as u16), new_children);
-    with_green(parent, new_green)
-}
-
-fn with_green(syntax_node: &SyntaxNode, green: rowan::GreenNode) -> SyntaxNode {
-    let len = green.children().map(|it| it.text_len()).sum::<TextSize>();
-    let new_root_node = syntax_node.replace_with(green);
-    let new_root_node = SyntaxNode::new_root(new_root_node);
-
-    // FIXME: use a more elegant way to re-fetch the node (#1185), make
-    // `range` private afterwards
-    let mut ptr = SyntaxNodePtr::new(syntax_node);
-    ptr.range = TextRange::at(ptr.range.start(), len);
-    ptr.to_node(&new_root_node)
-}
-
-fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
-    parent
-        .children_with_tokens()
-        .position(|it| it == child)
-        .expect("element is not a child of current element")
-}
-
-fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
-    match element {
-        NodeOrToken::Node(it) => it.green().into(),
-        NodeOrToken::Token(it) => it.green().to_owned().into(),
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use expect_test::{expect, Expect};
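The functions removed in this hunk formed the immutable editing layer of algo: insert_children copied the parent's green children, spliced the new elements in at the requested InsertPosition, and rebuilt a fresh SyntaxNode via with_children/with_green, leaving the original tree untouched. A minimal sketch of how a caller used that now-removed API, modeled on the test deleted earlier in this diff (the wrapper function and argument names are illustrative, not from the diff):

    use syntax::{
        algo::{insert_children, InsertPosition},
        NodeOrToken, SyntaxNode, SyntaxToken,
    };

    /// Returns a new tree in which `space` has been appended as the last child
    /// of `parent`; the tree that `parent` belongs to is left unchanged.
    fn append_space_immutably(parent: &SyntaxNode, space: SyntaxToken) -> SyntaxNode {
        insert_children(parent, InsertPosition::Last, std::iter::once(NodeOrToken::Token(space)))
    }

With this commit that layer is gone; callers migrate to the mutable ted API instead, as in the sketch after the test hunk above.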
@@ -47,7 +47,6 @@ use stdx::format_to;
 use text_edit::Indel;
 
 pub use crate::{
-    algo::InsertPosition,
     ast::{AstNode, AstToken},
     parsing::lexer::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token},
     ptr::{AstPtr, SyntaxNodePtr},