switch to upstream rowan's API

Aleksey Kladov 2019-07-20 20:04:34 +03:00
parent 7bde8012cb
commit c9cfd57eea
22 changed files with 208 additions and 738 deletions

Cargo.lock (generated)

@ -263,11 +263,6 @@ dependencies = [
"bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "colosseum"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "console"
version = "0.7.7"
@ -484,6 +479,11 @@ dependencies = [
"yansi 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "format-buf"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "fs_extra"
version = "1.1.0"
@ -1124,6 +1124,7 @@ name = "ra_assists"
version = "0.1.0"
dependencies = [
"arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"once_cell 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1211,6 +1212,7 @@ dependencies = [
name = "ra_ide_api"
version = "0.1.0"
dependencies = [
"format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"insta 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1313,7 +1315,7 @@ dependencies = [
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_parser 0.1.0",
"ra_text_edit 0.1.0",
"rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rowan 0.6.0-pre.1 (registry+https://github.com/rust-lang/crates.io-index)",
"smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1584,11 +1586,9 @@ dependencies = [
[[package]]
name = "rowan"
version = "0.5.6"
version = "0.6.0-pre.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
"text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2150,7 +2150,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9"
"checksum clicolors-control 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "73abfd4c73d003a674ce5d2933fca6ce6c42480ea84a5ffe0a2dc39ed56300f9"
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
"checksum colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "370c83b49aedf022ee27942e8ae1d9de1cf40dc9653ee6550e4455d08f6406f9"
"checksum console 0.7.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8ca57c2c14b8a2bf3105bc9d15574aad80babf6a9c44b1058034cdf8bd169628"
"checksum cpuprofiler 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "33f07976bb6821459632d7a18d97ccca005cb5c552f251f822c7c1781c1d7035"
"checksum crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "0f0ed1a4de2235cabda8558ff5840bffb97fcb64c97827f354a451307df5f72b"
@ -2177,6 +2176,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum filetime 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "2f8c63033fcba1f51ef744505b3cad42510432b904c062afa67ad7ece008429d"
"checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33"
"checksum flexi_logger 0.13.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e9d3c4470d1ff8446baa0c13202646722886dde8dc4c5d33cb8242d70ece79d5"
"checksum format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f7aea5a5909a74969507051a3b17adc84737e31a5f910559892aedce026f4d53"
"checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674"
"checksum fsevent 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6"
"checksum fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0"
@ -2275,7 +2275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c"
"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
"checksum ron 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "17f52a24414403f81528b67488cf8edc4eda977d3af1646bb6b106a600ead78f"
"checksum rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0c433ffe99ac9b96fa9882805d05eee5d750c9202fb42d0546c556e5d70d54be"
"checksum rowan 0.6.0-pre.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0eeee40f1a2724b7d0d9fa5f73a7804cd2f4c91b37ba9f785d429f31819d60df"
"checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af"
"checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"


@ -5,6 +5,7 @@ version = "0.1.0"
authors = ["rust-analyzer developers"]
[dependencies]
format-buf = "1.0.0"
once_cell = "0.2.0"
join_to_string = "0.1.3"
itertools = "0.8.0"


@ -1,5 +1,4 @@
use std::fmt::Write;
use format_buf::format;
use hir::db::HirDatabase;
use join_to_string::join;
use ra_syntax::{
@ -19,7 +18,7 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let mut buf = String::new();
buf.push_str("\n\nimpl");
if let Some(type_params) = &type_params {
write!(buf, "{}", type_params.syntax()).unwrap();
format!(buf, "{}", type_params.syntax());
}
buf.push_str(" ");
buf.push_str(name.text().as_str());
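
The hunk above swaps std::fmt::Write plus write!(...).unwrap() for the format-buf crate's format! macro, which appends straight into an existing String and has no Result to unwrap. A minimal sketch of that pattern (build_impl_header and the buffer contents are made up for illustration):

    use format_buf::format;

    fn build_impl_header() -> String {
        let mut buf = String::from("\n\nimpl");
        // format-buf's format! writes into `buf` in place; unlike write! there
        // is no fmt::Result to unwrap.
        format!(buf, "<{}>", "T");
        buf.push_str(" Foo {\n}");
        buf
    }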


@ -4,7 +4,10 @@ use arrayvec::ArrayVec;
use hir::Name;
use ra_fmt::leading_indent;
use ra_syntax::{
ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, T,
algo::{insert_children, replace_children},
ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement,
SyntaxKind::*,
T,
};
use ra_text_edit::TextEditBuilder;
@ -38,7 +41,7 @@ impl<N: AstNode> AstEditor<N> {
position: InsertPosition<SyntaxElement>,
to_insert: impl Iterator<Item = SyntaxElement>,
) -> N {
let new_syntax = self.ast().syntax().insert_children(position, to_insert);
let new_syntax = insert_children(self.ast().syntax(), position, to_insert);
N::cast(new_syntax).unwrap()
}
@ -48,7 +51,7 @@ impl<N: AstNode> AstEditor<N> {
to_delete: RangeInclusive<SyntaxElement>,
to_insert: impl Iterator<Item = SyntaxElement>,
) -> N {
let new_syntax = self.ast().syntax().replace_children(to_delete, to_insert);
let new_syntax = replace_children(self.ast().syntax(), to_delete, to_insert);
N::cast(new_syntax).unwrap()
}


@ -1,5 +1,4 @@
use std::fmt::Write;
use format_buf::format;
use hir::db::HirDatabase;
use ra_syntax::{
ast::{self, AstNode},
@ -37,7 +36,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
buf.push_str("let var_name = ");
TextUnit::of_str("let ")
};
write!(buf, "{}", expr.syntax()).unwrap();
format!(buf, "{}", expr.syntax());
let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone());
let is_full_stmt = if let Some(expr_stmt) = &full_stmt {
Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone())


@ -2,7 +2,7 @@ use hir::db::HirDatabase;
use ra_syntax::{
ast,
ast::{AstNode, AstToken, IfExpr, MatchArm},
SyntaxElement, TextUnit,
TextUnit,
};
use crate::{Assist, AssistCtx, AssistId};
@ -18,10 +18,10 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op
ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| {
edit.target(guard.syntax().text_range());
let offseting_amount = match &space_before_guard {
Some(SyntaxElement::Token(tok)) => {
let offseting_amount = match space_before_guard.and_then(|it| it.into_token()) {
Some(tok) => {
if let Some(_) = ast::Whitespace::cast(tok.clone()) {
let ele = space_before_guard.unwrap().text_range();
let ele = tok.text_range();
edit.delete(ele);
ele.len()
} else {
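
SyntaxElement is now an alias for rowan's NodeOrToken<SyntaxNode, SyntaxToken>, so call sites like the guard handling above use the Option-returning into_token() / into_node() helpers instead of matching on the old local enum. A sketch of that shape (deletable_ws_len is a hypothetical helper, not part of this commit):

    use ra_syntax::{SyntaxElement, SyntaxKind, TextUnit};

    // If the element is a whitespace token, return how much text deleting it
    // would remove; otherwise None.
    fn deletable_ws_len(element: Option<SyntaxElement>) -> Option<TextUnit> {
        let tok = element.and_then(|it| it.into_token())?;
        if tok.kind() == SyntaxKind::WHITESPACE {
            Some(tok.text_range().len())
        } else {
            None
        }
    }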


@ -5,6 +5,7 @@ version = "0.1.0"
authors = ["rust-analyzer developers"]
[dependencies]
format-buf = "1.0.0"
itertools = "0.8.0"
join_to_string = "0.1.3"
log = "0.4.5"


@ -1,5 +1,4 @@
use std::fmt::Write;
use format_buf::format;
use ra_syntax::ast::{self, AstNode, NameOwner, TypeAscriptionOwner, VisibilityOwner};
pub(crate) trait ShortLabel {
@ -73,7 +72,7 @@ where
let mut buf = short_label_from_node(node, prefix)?;
if let Some(type_ref) = node.ascribed_type() {
write!(buf, ": {}", type_ref.syntax()).unwrap();
format!(buf, ": {}", type_ref.syntax());
}
Some(buf)


@ -2,7 +2,7 @@ use ra_db::SourceDatabase;
use ra_syntax::{
algo::{find_covering_element, find_token_at_offset, TokenAtOffset},
ast::{self, AstNode, AstToken},
Direction, SyntaxElement,
Direction, NodeOrToken,
SyntaxKind::*,
SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
};
@ -53,7 +53,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
return Some(leaf_range);
};
let node = match find_covering_element(root, range) {
SyntaxElement::Token(token) => {
NodeOrToken::Token(token) => {
if token.text_range() != range {
return Some(token.text_range());
}
@ -64,7 +64,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
}
token.parent()
}
SyntaxElement::Node(node) => node,
NodeOrToken::Node(node) => node,
};
if node.text_range() != range {
return Some(node.text_range());
@ -153,8 +153,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
node.siblings_with_tokens(dir)
.skip(1)
.skip_while(|node| match node {
SyntaxElement::Node(_) => false,
SyntaxElement::Token(it) => is_single_line_ws(it),
NodeOrToken::Node(_) => false,
NodeOrToken::Token(it) => is_single_line_ws(it),
})
.next()
.and_then(|it| it.into_token())
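
find_covering_element now returns rowan's NodeOrToken directly, so the match arms above change from SyntaxElement::Node/Token to NodeOrToken::Node/Token. The usual "normalize to a node" step looks like this (covering_node is an illustrative name; the same fallback to the token's parent appears in the join_lines change further down):

    use ra_syntax::{algo::find_covering_element, NodeOrToken, SyntaxNode, TextRange};

    // Find the element covering `range` and always come back with a node,
    // falling back to the covering token's parent.
    fn covering_node(root: &SyntaxNode, range: TextRange) -> SyntaxNode {
        match find_covering_element(root, range) {
            NodeOrToken::Node(node) => node,
            NodeOrToken::Token(token) => token.parent(),
        }
    }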


@ -2,7 +2,7 @@ use rustc_hash::FxHashSet;
use ra_syntax::{
ast::{self, AstNode, AstToken, VisibilityOwner},
Direction, SourceFile, SyntaxElement,
Direction, NodeOrToken, SourceFile,
SyntaxKind::{self, *},
SyntaxNode, TextRange,
};
@ -31,8 +31,8 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
// Fold items that span multiple lines
if let Some(kind) = fold_kind(element.kind()) {
let is_multiline = match &element {
SyntaxElement::Node(node) => node.text().contains_char('\n'),
SyntaxElement::Token(token) => token.text().contains('\n'),
NodeOrToken::Node(node) => node.text().contains_char('\n'),
NodeOrToken::Token(token) => token.text().contains('\n'),
};
if is_multiline {
res.push(Fold { range: element.text_range(), kind });
@ -41,7 +41,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
}
match element {
SyntaxElement::Token(token) => {
NodeOrToken::Token(token) => {
// Fold groups of comments
if let Some(comment) = ast::Comment::cast(token) {
if !visited_comments.contains(&comment) {
@ -53,7 +53,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
}
}
}
SyntaxElement::Node(node) => {
NodeOrToken::Node(node) => {
// Fold groups of imports
if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) {
@ -108,7 +108,7 @@ fn contiguous_range_for_group_unless(
let mut last = first.clone();
for element in first.siblings_with_tokens(Direction::Next) {
let node = match element {
SyntaxElement::Token(token) => {
NodeOrToken::Token(token) => {
if let Some(ws) = ast::Whitespace::cast(token) {
if !ws.spans_multiple_lines() {
// Ignore whitespace without blank lines
@ -119,7 +119,7 @@ fn contiguous_range_for_group_unless(
// group ends here
break;
}
SyntaxElement::Node(node) => node,
NodeOrToken::Node(node) => node,
};
// Stop if we find a node that doesn't belong to the group
@ -154,7 +154,7 @@ fn contiguous_range_for_comment(
let mut last = first.clone();
for element in first.syntax().siblings_with_tokens(Direction::Next) {
match element {
SyntaxElement::Token(token) => {
NodeOrToken::Token(token) => {
if let Some(ws) = ast::Whitespace::cast(token.clone()) {
if !ws.spans_multiple_lines() {
// Ignore whitespace without blank lines
@ -173,7 +173,7 @@ fn contiguous_range_for_comment(
// * A comment of a different flavor was reached
break;
}
SyntaxElement::Node(_) => break,
NodeOrToken::Node(_) => break,
};
}


@ -3,7 +3,7 @@ use ra_fmt::{compute_ws, extract_trivial_expression};
use ra_syntax::{
algo::{find_covering_element, non_trivia_sibling},
ast::{self, AstNode, AstToken},
Direction, SourceFile, SyntaxElement,
Direction, NodeOrToken, SourceFile,
SyntaxKind::{self, WHITESPACE},
SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
};
@ -23,8 +23,8 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
};
let node = match find_covering_element(file.syntax(), range) {
SyntaxElement::Node(node) => node,
SyntaxElement::Token(token) => token.parent(),
NodeOrToken::Node(node) => node,
NodeOrToken::Token(token) => token.parent(),
};
let mut edit = TextEditBuilder::default();
for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {


@ -1,7 +1,7 @@
use crate::db::RootDatabase;
use ra_db::SourceDatabase;
use ra_syntax::{
algo, AstNode, SourceFile, SyntaxElement,
algo, AstNode, NodeOrToken, SourceFile,
SyntaxKind::{RAW_STRING, STRING},
SyntaxToken, TextRange,
};
@ -16,8 +16,8 @@ pub(crate) fn syntax_tree(
let parse = db.parse(file_id);
if let Some(text_range) = text_range {
let node = match algo::find_covering_element(parse.tree().syntax(), text_range) {
SyntaxElement::Node(node) => node,
SyntaxElement::Token(token) => {
NodeOrToken::Node(node) => node,
NodeOrToken::Token(token) => {
if let Some(tree) = syntax_tree_for_string(&token, text_range) {
return tree;
}


@ -1,12 +1,13 @@
use crate::subtree_source::SubtreeTokenSource;
use crate::ExpandError;
use ra_parser::{ParseError, TreeSink};
use ra_syntax::{
ast, AstNode, AstToken, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode,
ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
SyntaxTreeBuilder, TextRange, TextUnit, T,
};
use tt::buffer::{Cursor, TokenBuffer};
use crate::subtree_source::SubtreeTokenSource;
use crate::ExpandError;
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Default)]
pub struct TokenMap {
@ -200,7 +201,7 @@ fn convert_tt(
}
match child {
SyntaxElement::Token(token) => {
NodeOrToken::Token(token) => {
if let Some(doc_tokens) = convert_doc_comment(&token) {
token_trees.extend(doc_tokens);
} else if token.kind().is_trivia() {
@ -210,7 +211,7 @@ fn convert_tt(
let char = token.text().chars().next().unwrap();
let spacing = match child_iter.peek() {
Some(SyntaxElement::Token(token)) => {
Some(NodeOrToken::Token(token)) => {
if token.kind().is_punct() {
tt::Spacing::Joint
} else {
@ -241,7 +242,7 @@ fn convert_tt(
token_trees.push(child);
}
}
SyntaxElement::Node(node) => {
NodeOrToken::Node(node) => {
let child = convert_tt(token_map, global_offset, &node)?.into();
token_trees.push(child);
}
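
The same NodeOrToken pattern shows up where the token-tree converter above peeks at the next element to decide punctuation spacing. A sketch of just that decision, assuming tt::Spacing keeps the Joint/Alone variants used in the diff (spacing_after is a hypothetical helper):

    use ra_syntax::{NodeOrToken, SyntaxElement};

    // A punct is Joint when the next sibling element is itself a punct token;
    // anything else (a node, trivia, or nothing) makes it Alone.
    fn spacing_after(next: Option<&SyntaxElement>) -> tt::Spacing {
        match next {
            Some(NodeOrToken::Token(token)) if token.kind().is_punct() => tt::Spacing::Joint,
            _ => tt::Spacing::Alone,
        }
    }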


@ -1,4 +1,4 @@
use ra_syntax::{ast, AstNode};
use ra_syntax::{ast, AstNode, NodeOrToken};
use super::*;
@ -118,11 +118,11 @@ pub fn debug_dump_ignore_spaces(node: &ra_syntax::SyntaxNode) -> String {
match event {
WalkEvent::Enter(element) => {
match element {
ra_syntax::SyntaxElement::Node(node) => {
NodeOrToken::Node(node) => {
indent!();
writeln!(buf, "{:?}", node.kind()).unwrap();
}
ra_syntax::SyntaxElement::Token(token) => match token.kind() {
NodeOrToken::Token(token) => match token.kind() {
ra_syntax::SyntaxKind::WHITESPACE => {}
_ => {
indent!();


@ -10,7 +10,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer"
[dependencies]
unicode-xid = "0.1.0"
itertools = "0.8.0"
rowan = "0.5.6"
rowan = "0.6.0-pre.1"
# ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here
# to reduce number of compilations


@ -1,17 +1,18 @@
pub mod visit;
use std::ops::RangeInclusive;
use itertools::Itertools;
use crate::{AstNode, Direction, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit};
use crate::{
AstNode, Direction, InsertPosition, NodeOrToken, SourceFile, SyntaxElement, SyntaxNode,
SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
};
pub use rowan::TokenAtOffset;
pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> {
match node.0.token_at_offset(offset) {
TokenAtOffset::None => TokenAtOffset::None,
TokenAtOffset::Single(n) => TokenAtOffset::Single(SyntaxToken(n)),
TokenAtOffset::Between(l, r) => TokenAtOffset::Between(SyntaxToken(l), SyntaxToken(r)),
}
node.token_at_offset(offset)
}
/// Returns ancestors of the node at the offset, sorted by length. This should
@ -44,20 +45,110 @@ pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) ->
/// Finds the first sibling in the given direction which is not `trivia`
pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option<SyntaxElement> {
return match element {
SyntaxElement::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
SyntaxElement::Token(token) => {
token.siblings_with_tokens(direction).skip(1).find(not_trivia)
}
NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia),
};
fn not_trivia(element: &SyntaxElement) -> bool {
match element {
SyntaxElement::Node(_) => true,
SyntaxElement::Token(token) => !token.kind().is_trivia(),
NodeOrToken::Node(_) => true,
NodeOrToken::Token(token) => !token.kind().is_trivia(),
}
}
}
pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement {
SyntaxElement::new(root.0.covering_node(range))
root.covering_element(range)
}
/// Adds specified children (tokens or nodes) to the current node at the
/// specific position.
///
/// This is a type-unsafe low-level editing API, if you need to use it,
/// prefer to create a type-safe abstraction on top of it instead.
pub fn insert_children(
parent: &SyntaxNode,
position: InsertPosition<SyntaxElement>,
to_insert: impl Iterator<Item = SyntaxElement>,
) -> SyntaxNode {
let mut delta = TextUnit::default();
let to_insert = to_insert.map(|element| {
delta += element.text_range().len();
to_green_element(element)
});
let old_children = parent.green().children();
let new_children = match &position {
InsertPosition::First => {
to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>()
}
InsertPosition::Last => old_children.iter().cloned().chain(to_insert).collect::<Box<[_]>>(),
InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
let split_at = position_of_child(parent, anchor.clone()) + take_anchor;
let (before, after) = old_children.split_at(split_at);
before
.iter()
.cloned()
.chain(to_insert)
.chain(after.iter().cloned())
.collect::<Box<[_]>>()
}
};
with_children(parent, new_children)
}
/// Replaces all nodes in `to_delete` with nodes from `to_insert`
///
/// This is a type-unsafe low-level editing API, if you need to use it,
/// prefer to create a type-safe abstraction on top of it instead.
pub fn replace_children(
parent: &SyntaxNode,
to_delete: RangeInclusive<SyntaxElement>,
to_insert: impl Iterator<Item = SyntaxElement>,
) -> SyntaxNode {
let start = position_of_child(parent, to_delete.start().clone());
let end = position_of_child(parent, to_delete.end().clone());
let old_children = parent.green().children();
let new_children = old_children[..start]
.iter()
.cloned()
.chain(to_insert.map(to_green_element))
.chain(old_children[end + 1..].iter().cloned())
.collect::<Box<[_]>>();
with_children(parent, new_children)
}
fn with_children(
parent: &SyntaxNode,
new_children: Box<[NodeOrToken<rowan::GreenNode, rowan::GreenToken>]>,
) -> SyntaxNode {
let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
let new_node =
rowan::GreenNode::new(rowan::cursor::SyntaxKind(parent.kind() as u16), new_children);
let new_file_node = parent.replace_with(new_node);
let file = SourceFile::new(new_file_node);
// FIXME: use a more elegant way to re-fetch the node (#1185), make
// `range` private afterwards
let mut ptr = SyntaxNodePtr::new(parent);
ptr.range = TextRange::offset_len(ptr.range().start(), len);
ptr.to_node(file.syntax()).to_owned()
}
fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
parent
.children_with_tokens()
.position(|it| it == child)
.expect("element is not a child of current element")
}
fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
match element {
NodeOrToken::Node(it) => it.green().clone().into(),
NodeOrToken::Token(it) => it.green().clone().into(),
}
}
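
insert_children and replace_children are no longer inherent methods on SyntaxNode; they move into ra_syntax::algo as free functions that splice green-tree children and re-fetch the node, and callers such as AstEditor above now go through them. A hedged usage sketch (append_fragment and the fragment-parsing idea are illustrative, not part of this commit):

    use ra_syntax::{algo, AstNode, InsertPosition, SourceFile, SyntaxNode};

    // Append every child (nodes and tokens) of a parsed fragment to the end of
    // `target`. The trees are immutable, so algo::insert_children returns a new
    // node rather than mutating in place.
    fn append_fragment(target: &SyntaxNode, fragment: &SourceFile) -> SyntaxNode {
        let to_insert = fragment.syntax().children_with_tokens();
        algo::insert_children(target, InsertPosition::Last, to_insert)
    }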


@ -2,7 +2,7 @@
use crate::{
ast::{self, child_opt, children, AstChildren, AstNode},
SmolStr, SyntaxElement,
SmolStr,
SyntaxKind::*,
SyntaxToken, T,
};
@ -229,14 +229,11 @@ pub enum LiteralKind {
impl ast::Literal {
pub fn token(&self) -> SyntaxToken {
let elem = self
.syntax()
self.syntax()
.children_with_tokens()
.find(|e| e.kind() != ATTR && !e.kind().is_trivia());
match elem {
Some(SyntaxElement::Token(token)) => token,
_ => unreachable!(),
}
.find(|e| e.kind() != ATTR && !e.kind().is_trivia())
.and_then(|e| e.into_token())
.unwrap()
}
pub fn kind(&self) -> LiteralKind {


@ -24,10 +24,7 @@ impl ast::NameRef {
}
fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
match node.0.green().children().first() {
Some(rowan::GreenElement::Token(it)) => it.text(),
_ => panic!(),
}
node.green().children().first().and_then(|it| it.as_token()).unwrap().text()
}
impl ast::Attr {


@ -20,7 +20,6 @@
//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
mod syntax_node;
mod syntax_text;
mod syntax_error;
mod parsing;
mod validation;
@ -43,14 +42,13 @@ pub use crate::{
ptr::{AstPtr, SyntaxNodePtr},
syntax_error::{Location, SyntaxError, SyntaxErrorKind},
syntax_node::{
Direction, InsertPosition, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder,
WalkEvent,
Direction, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
SyntaxTreeBuilder, WalkEvent,
},
syntax_text::SyntaxText,
};
pub use ra_parser::SyntaxKind;
pub use ra_parser::T;
pub use rowan::{SmolStr, TextRange, TextUnit};
pub use rowan::{SmolStr, SyntaxText, TextRange, TextUnit};
/// `Parse` is the result of the parsing: a syntax tree and a collection of
/// errors.
@ -76,7 +74,7 @@ impl<T> Parse<T> {
}
pub fn syntax_node(&self) -> SyntaxNode {
SyntaxNode::new(self.green.clone())
SyntaxNode::new_root(self.green.clone())
}
}
@ -147,7 +145,7 @@ pub use crate::ast::SourceFile;
impl SourceFile {
fn new(green: GreenNode) -> SourceFile {
let root = SyntaxNode::new(green);
let root = SyntaxNode::new_root(green);
if cfg!(debug_assertions) {
validation::validate_block_structure(&root);
}
@ -267,8 +265,8 @@ fn api_walkthrough() {
match event {
WalkEvent::Enter(node) => {
let text = match &node {
SyntaxElement::Node(it) => it.text().to_string(),
SyntaxElement::Token(it) => it.text().to_string(),
NodeOrToken::Node(it) => it.text().to_string(),
NodeOrToken::Token(it) => it.text().to_string(),
};
buf += &format!("{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent);
indent += 2;
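
With SyntaxNode::new_root and the re-exported NodeOrToken, the public traversal API is rowan's own. A compact version of the walk shown in api_walkthrough above (dump is a made-up name):

    use ra_syntax::{AstNode, NodeOrToken, SourceFile, WalkEvent};

    fn dump(text: &str) {
        let file = SourceFile::parse(text).tree();
        for event in file.syntax().preorder_with_tokens() {
            if let WalkEvent::Enter(element) = event {
                match element {
                    NodeOrToken::Node(node) => println!("node  {:?}", node.kind()),
                    NodeOrToken::Token(token) => println!("token {:?} {:?}", token.kind(), token.text()),
                }
            }
        }
    }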


@ -16,7 +16,7 @@ use crate::{
text_token_source::TextTokenSource,
text_tree_sink::TextTreeSink,
},
syntax_node::{GreenNode, GreenToken, SyntaxElement, SyntaxNode},
syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
SyntaxError,
SyntaxKind::*,
TextRange, TextUnit, T,
@ -70,7 +70,8 @@ fn reparse_token<'node>(
}
}
let new_token = GreenToken::new(rowan::SyntaxKind(token.kind().into()), text.into());
let new_token =
GreenToken::new(rowan::cursor::SyntaxKind(token.kind().into()), text.into());
Some((token.replace_with(new_token), token.text_range()))
}
_ => None,
@ -98,8 +99,8 @@ fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String {
let edit =
AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone());
let text = match element {
SyntaxElement::Token(token) => token.text().to_string(),
SyntaxElement::Node(node) => node.text().to_string(),
NodeOrToken::Token(token) => token.text().to_string(),
NodeOrToken::Node(node) => node.text().to_string(),
};
edit.apply(text)
}
@ -114,8 +115,8 @@ fn is_contextual_kw(text: &str) -> bool {
fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
let node = algo::find_covering_element(node, range);
let mut ancestors = match node {
SyntaxElement::Token(it) => it.parent().ancestors(),
SyntaxElement::Node(it) => it.ancestors(),
NodeOrToken::Token(it) => it.parent().ancestors(),
NodeOrToken::Node(it) => it.ancestors(),
};
ancestors.find_map(|node| {
let first_child = node.first_child_or_token().map(|it| it.kind());


@ -6,15 +6,12 @@
//! The *real* implementation is in the (language-agnostic) `rowan` crate, this
//! modules just wraps its API.
use std::{fmt, iter::successors, ops::RangeInclusive};
use ra_parser::ParseError;
use rowan::GreenNodeBuilder;
use rowan::{GreenNodeBuilder, Language};
use crate::{
syntax_error::{SyntaxError, SyntaxErrorKind},
AstNode, Parse, SmolStr, SourceFile, SyntaxKind, SyntaxNodePtr, SyntaxText, TextRange,
TextUnit,
Parse, SmolStr, SyntaxKind, TextUnit,
};
pub use rowan::WalkEvent;
@ -28,465 +25,27 @@ pub enum InsertPosition<T> {
After(T),
}
#[derive(PartialEq, Eq, Hash, Clone)]
pub struct SyntaxNode(pub(crate) rowan::cursor::SyntaxNode);
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum RustLanguage {}
impl Language for RustLanguage {
type Kind = SyntaxKind;
impl fmt::Debug for SyntaxNode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if f.alternate() {
let mut level = 0;
for event in self.preorder_with_tokens() {
match event {
WalkEvent::Enter(element) => {
for _ in 0..level {
write!(f, " ")?;
}
match element {
SyntaxElement::Node(node) => writeln!(f, "{:?}", node)?,
SyntaxElement::Token(token) => writeln!(f, "{:?}", token)?,
}
level += 1;
}
WalkEvent::Leave(_) => level -= 1,
}
}
assert_eq!(level, 0);
Ok(())
} else {
write!(f, "{:?}@{:?}", self.kind(), self.text_range())
}
fn kind_from_raw(raw: rowan::cursor::SyntaxKind) -> SyntaxKind {
SyntaxKind::from(raw.0)
}
fn kind_to_raw(kind: SyntaxKind) -> rowan::cursor::SyntaxKind {
rowan::cursor::SyntaxKind(kind.into())
}
}
impl fmt::Display for SyntaxNode {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.text(), fmt)
}
}
pub type SyntaxNode = rowan::SyntaxNode<RustLanguage>;
pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>;
pub type SyntaxElement = rowan::NodeOrToken<SyntaxNode, SyntaxToken>;
pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Direction {
Next,
Prev,
}
impl SyntaxNode {
pub(crate) fn new(green: GreenNode) -> SyntaxNode {
let inner = rowan::cursor::SyntaxNode::new_root(green);
SyntaxNode(inner)
}
pub fn kind(&self) -> SyntaxKind {
self.0.kind().0.into()
}
pub fn text_range(&self) -> TextRange {
self.0.text_range()
}
pub fn text(&self) -> SyntaxText {
SyntaxText::new(self.clone())
}
pub fn parent(&self) -> Option<SyntaxNode> {
self.0.parent().map(SyntaxNode)
}
pub fn first_child(&self) -> Option<SyntaxNode> {
self.0.first_child().map(SyntaxNode)
}
pub fn first_child_or_token(&self) -> Option<SyntaxElement> {
self.0.first_child_or_token().map(SyntaxElement::new)
}
pub fn last_child(&self) -> Option<SyntaxNode> {
self.0.last_child().map(SyntaxNode)
}
pub fn last_child_or_token(&self) -> Option<SyntaxElement> {
self.0.last_child_or_token().map(SyntaxElement::new)
}
pub fn next_sibling(&self) -> Option<SyntaxNode> {
self.0.next_sibling().map(SyntaxNode)
}
pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
self.0.next_sibling_or_token().map(SyntaxElement::new)
}
pub fn prev_sibling(&self) -> Option<SyntaxNode> {
self.0.prev_sibling().map(SyntaxNode)
}
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
self.0.prev_sibling_or_token().map(SyntaxElement::new)
}
pub fn children(&self) -> SyntaxNodeChildren {
SyntaxNodeChildren(self.0.children())
}
pub fn children_with_tokens(&self) -> SyntaxElementChildren {
SyntaxElementChildren(self.0.children_with_tokens())
}
pub fn first_token(&self) -> Option<SyntaxToken> {
self.0.first_token().map(SyntaxToken)
}
pub fn last_token(&self) -> Option<SyntaxToken> {
self.0.last_token().map(SyntaxToken)
}
pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
successors(Some(self.clone()), |node| node.parent())
}
pub fn descendants(&self) -> impl Iterator<Item = SyntaxNode> {
self.preorder().filter_map(|event| match event {
WalkEvent::Enter(node) => Some(node),
WalkEvent::Leave(_) => None,
})
}
pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElement> {
self.preorder_with_tokens().filter_map(|event| match event {
WalkEvent::Enter(it) => Some(it),
WalkEvent::Leave(_) => None,
})
}
pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = SyntaxNode> {
successors(Some(self.clone()), move |node| match direction {
Direction::Next => node.next_sibling(),
Direction::Prev => node.prev_sibling(),
})
}
pub fn siblings_with_tokens(
&self,
direction: Direction,
) -> impl Iterator<Item = SyntaxElement> {
let me: SyntaxElement = self.clone().into();
successors(Some(me), move |el| match direction {
Direction::Next => el.next_sibling_or_token(),
Direction::Prev => el.prev_sibling_or_token(),
})
}
pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<SyntaxNode>> {
self.0.preorder().map(|event| match event {
WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode(n)),
WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode(n)),
})
}
pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElement>> {
self.0.preorder_with_tokens().map(|event| match event {
WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxElement::new(n)),
WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxElement::new(n)),
})
}
pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode {
self.0.replace_with(replacement)
}
/// Adds specified children (tokens or nodes) to the current node at the
/// specific position.
///
/// This is a type-unsafe low-level editing API, if you need to use it,
/// prefer to create a type-safe abstraction on top of it instead.
pub fn insert_children(
&self,
position: InsertPosition<SyntaxElement>,
to_insert: impl Iterator<Item = SyntaxElement>,
) -> SyntaxNode {
let mut delta = TextUnit::default();
let to_insert = to_insert.map(|element| {
delta += element.text_len();
to_green_element(element)
});
let old_children = self.0.green().children();
let new_children = match &position {
InsertPosition::First => {
to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>()
}
InsertPosition::Last => {
old_children.iter().cloned().chain(to_insert).collect::<Box<[_]>>()
}
InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
let split_at = self.position_of_child(anchor.clone()) + take_anchor;
let (before, after) = old_children.split_at(split_at);
before
.iter()
.cloned()
.chain(to_insert)
.chain(after.iter().cloned())
.collect::<Box<[_]>>()
}
};
self.with_children(new_children)
}
/// Replaces all nodes in `to_delete` with nodes from `to_insert`
///
/// This is a type-unsafe low-level editing API, if you need to use it,
/// prefer to create a type-safe abstraction on top of it instead.
pub fn replace_children(
&self,
to_delete: RangeInclusive<SyntaxElement>,
to_insert: impl Iterator<Item = SyntaxElement>,
) -> SyntaxNode {
let start = self.position_of_child(to_delete.start().clone());
let end = self.position_of_child(to_delete.end().clone());
let old_children = self.0.green().children();
let new_children = old_children[..start]
.iter()
.cloned()
.chain(to_insert.map(to_green_element))
.chain(old_children[end + 1..].iter().cloned())
.collect::<Box<[_]>>();
self.with_children(new_children)
}
fn with_children(&self, new_children: Box<[rowan::GreenElement]>) -> SyntaxNode {
let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
let new_node = GreenNode::new(rowan::SyntaxKind(self.kind() as u16), new_children);
let new_file_node = self.replace_with(new_node);
let file = SourceFile::new(new_file_node);
// FIXME: use a more elegant way to re-fetch the node (#1185), make
// `range` private afterwards
let mut ptr = SyntaxNodePtr::new(self);
ptr.range = TextRange::offset_len(ptr.range().start(), len);
ptr.to_node(file.syntax()).to_owned()
}
fn position_of_child(&self, child: SyntaxElement) -> usize {
self.children_with_tokens()
.position(|it| it == child)
.expect("element is not a child of current element")
}
}
fn to_green_element(element: SyntaxElement) -> rowan::GreenElement {
match element {
SyntaxElement::Node(node) => node.0.green().clone().into(),
SyntaxElement::Token(tok) => {
GreenToken::new(rowan::SyntaxKind(tok.kind() as u16), tok.text().clone()).into()
}
}
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct SyntaxToken(pub(crate) rowan::cursor::SyntaxToken);
impl fmt::Debug for SyntaxToken {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{:?}@{:?}", self.kind(), self.text_range())?;
if self.text().len() < 25 {
return write!(fmt, " {:?}", self.text());
}
let text = self.text().as_str();
for idx in 21..25 {
if text.is_char_boundary(idx) {
let text = format!("{} ...", &text[..idx]);
return write!(fmt, " {:?}", text);
}
}
unreachable!()
}
}
impl fmt::Display for SyntaxToken {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(self.text(), fmt)
}
}
impl SyntaxToken {
pub fn kind(&self) -> SyntaxKind {
self.0.kind().0.into()
}
pub fn text(&self) -> &SmolStr {
self.0.text()
}
pub fn text_range(&self) -> TextRange {
self.0.text_range()
}
pub fn parent(&self) -> SyntaxNode {
SyntaxNode(self.0.parent())
}
pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
self.0.next_sibling_or_token().map(SyntaxElement::new)
}
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
self.0.prev_sibling_or_token().map(SyntaxElement::new)
}
pub fn siblings_with_tokens(
&self,
direction: Direction,
) -> impl Iterator<Item = SyntaxElement> {
let me: SyntaxElement = self.clone().into();
successors(Some(me), move |el| match direction {
Direction::Next => el.next_sibling_or_token(),
Direction::Prev => el.prev_sibling_or_token(),
})
}
pub fn next_token(&self) -> Option<SyntaxToken> {
self.0.next_token().map(SyntaxToken)
}
pub fn prev_token(&self) -> Option<SyntaxToken> {
self.0.prev_token().map(SyntaxToken)
}
pub(crate) fn replace_with(&self, new_token: GreenToken) -> GreenNode {
self.0.replace_with(new_token)
}
}
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub enum SyntaxElement {
Node(SyntaxNode),
Token(SyntaxToken),
}
impl From<SyntaxNode> for SyntaxElement {
fn from(node: SyntaxNode) -> Self {
SyntaxElement::Node(node)
}
}
impl From<SyntaxToken> for SyntaxElement {
fn from(token: SyntaxToken) -> Self {
SyntaxElement::Token(token)
}
}
impl fmt::Display for SyntaxElement {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self {
SyntaxElement::Node(it) => fmt::Display::fmt(it, fmt),
SyntaxElement::Token(it) => fmt::Display::fmt(it, fmt),
}
}
}
impl SyntaxElement {
pub(crate) fn new(el: rowan::cursor::SyntaxElement) -> Self {
match el {
rowan::cursor::SyntaxElement::Node(it) => SyntaxElement::Node(SyntaxNode(it)),
rowan::cursor::SyntaxElement::Token(it) => SyntaxElement::Token(SyntaxToken(it)),
}
}
pub fn kind(&self) -> SyntaxKind {
match self {
SyntaxElement::Node(it) => it.kind(),
SyntaxElement::Token(it) => it.kind(),
}
}
pub fn as_node(&self) -> Option<&SyntaxNode> {
match self {
SyntaxElement::Node(node) => Some(node),
SyntaxElement::Token(_) => None,
}
}
pub fn into_node(self) -> Option<SyntaxNode> {
match self {
SyntaxElement::Node(node) => Some(node),
SyntaxElement::Token(_) => None,
}
}
pub fn as_token(&self) -> Option<&SyntaxToken> {
match self {
SyntaxElement::Node(_) => None,
SyntaxElement::Token(token) => Some(token),
}
}
pub fn into_token(self) -> Option<SyntaxToken> {
match self {
SyntaxElement::Node(_) => None,
SyntaxElement::Token(token) => Some(token),
}
}
pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
match self {
SyntaxElement::Node(it) => it.next_sibling_or_token(),
SyntaxElement::Token(it) => it.next_sibling_or_token(),
}
}
pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
match self {
SyntaxElement::Node(it) => it.prev_sibling_or_token(),
SyntaxElement::Token(it) => it.prev_sibling_or_token(),
}
}
pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
match self {
SyntaxElement::Node(it) => it.clone(),
SyntaxElement::Token(it) => it.parent(),
}
.ancestors()
}
pub fn text_range(&self) -> TextRange {
match self {
SyntaxElement::Node(it) => it.text_range(),
SyntaxElement::Token(it) => it.text_range(),
}
}
fn text_len(&self) -> TextUnit {
match self {
SyntaxElement::Node(node) => node.0.green().text_len(),
SyntaxElement::Token(token) => TextUnit::of_str(token.0.text()),
}
}
}
#[derive(Clone, Debug)]
pub struct SyntaxNodeChildren(rowan::cursor::SyntaxNodeChildren);
impl Iterator for SyntaxNodeChildren {
type Item = SyntaxNode;
fn next(&mut self) -> Option<SyntaxNode> {
self.0.next().map(SyntaxNode)
}
}
#[derive(Clone, Debug)]
pub struct SyntaxElementChildren(rowan::cursor::SyntaxElementChildren);
impl Iterator for SyntaxElementChildren {
type Item = SyntaxElement;
fn next(&mut self) -> Option<SyntaxElement> {
self.0.next().map(SyntaxElement::new)
}
}
pub use rowan::{Direction, NodeOrToken};
pub struct SyntaxTreeBuilder {
errors: Vec<SyntaxError>,
@ -507,19 +66,21 @@ impl SyntaxTreeBuilder {
pub fn finish(self) -> Parse<SyntaxNode> {
let (green, errors) = self.finish_raw();
let node = SyntaxNode::new(green);
let node = SyntaxNode::new_root(green);
if cfg!(debug_assertions) {
crate::validation::validate_block_structure(&node);
}
Parse::new(node.0.green().clone(), errors)
Parse::new(node.green().clone(), errors)
}
pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {
self.inner.token(rowan::SyntaxKind(kind.into()), text)
let kind = RustLanguage::kind_to_raw(kind);
self.inner.token(kind, text)
}
pub fn start_node(&mut self, kind: SyntaxKind) {
self.inner.start_node(rowan::SyntaxKind(kind.into()))
let kind = RustLanguage::kind_to_raw(kind);
self.inner.start_node(kind)
}
pub fn finish_node(&mut self) {
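
The heart of the switch is the RustLanguage marker type above: implementing rowan::Language maps SyntaxKind to and from rowan's raw kinds, after which SyntaxNode, SyntaxToken, SyntaxElement and the children iterators become plain type aliases over rowan's generic types, and SyntaxTreeBuilder only has to run kinds through kind_to_raw. A small sketch of driving the builder by hand (tiny_tree and the one-token tree are illustrative):

    use ra_syntax::{SmolStr, SyntaxKind, SyntaxNode, SyntaxTreeBuilder};

    fn tiny_tree() -> SyntaxNode {
        let mut builder = SyntaxTreeBuilder::default();
        builder.start_node(SyntaxKind::SOURCE_FILE);
        builder.token(SyntaxKind::WHITESPACE, SmolStr::new("\n"));
        builder.finish_node();
        // finish() hands back a Parse whose syntax_node() is a typed
        // rowan::SyntaxNode<RustLanguage> built with SyntaxNode::new_root.
        builder.finish().syntax_node()
    }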


@ -1,178 +0,0 @@
use std::{
fmt,
ops::{self, Bound},
};
use crate::{SmolStr, SyntaxElement, SyntaxNode, TextRange, TextUnit};
#[derive(Clone)]
pub struct SyntaxText {
node: SyntaxNode,
range: TextRange,
}
impl SyntaxText {
pub(crate) fn new(node: SyntaxNode) -> SyntaxText {
let range = node.text_range();
SyntaxText { node, range }
}
pub fn try_fold_chunks<T, F, E>(&self, init: T, mut f: F) -> Result<T, E>
where
F: FnMut(T, &str) -> Result<T, E>,
{
self.node.descendants_with_tokens().try_fold(init, move |acc, element| {
let res = match element {
SyntaxElement::Token(token) => {
let range = match self.range.intersection(&token.text_range()) {
None => return Ok(acc),
Some(it) => it,
};
let slice = if range == token.text_range() {
token.text()
} else {
let range = range - token.text_range().start();
&token.text()[range]
};
f(acc, slice)?
}
SyntaxElement::Node(_) => acc,
};
Ok(res)
})
}
pub fn try_for_each_chunk<F: FnMut(&str) -> Result<(), E>, E>(
&self,
mut f: F,
) -> Result<(), E> {
self.try_fold_chunks((), move |(), chunk| f(chunk))
}
pub fn for_each_chunk<F: FnMut(&str)>(&self, mut f: F) {
enum Void {}
match self.try_for_each_chunk(|chunk| Ok::<(), Void>(f(chunk))) {
Ok(()) => (),
Err(void) => match void {},
}
}
pub fn to_smol_string(&self) -> SmolStr {
self.to_string().into()
}
pub fn contains_char(&self, c: char) -> bool {
self.try_for_each_chunk(|chunk| if chunk.contains(c) { Err(()) } else { Ok(()) }).is_err()
}
pub fn find_char(&self, c: char) -> Option<TextUnit> {
let mut acc: TextUnit = 0.into();
let res = self.try_for_each_chunk(|chunk| {
if let Some(pos) = chunk.find(c) {
let pos: TextUnit = (pos as u32).into();
return Err(acc + pos);
}
acc += TextUnit::of_str(chunk);
Ok(())
});
found(res)
}
pub fn len(&self) -> TextUnit {
self.range.len()
}
pub fn is_empty(&self) -> bool {
self.range.is_empty()
}
pub fn slice(&self, range: impl ops::RangeBounds<TextUnit>) -> SyntaxText {
let start = match range.start_bound() {
Bound::Included(&b) => b,
Bound::Excluded(_) => panic!("utf-aware slicing can't work this way"),
Bound::Unbounded => 0.into(),
};
let end = match range.end_bound() {
Bound::Included(_) => panic!("utf-aware slicing can't work this way"),
Bound::Excluded(&b) => b,
Bound::Unbounded => self.len(),
};
assert!(start <= end);
let len = end - start;
let start = self.range.start() + start;
let end = start + len;
assert!(
start <= end,
"invalid slice, range: {:?}, slice: {:?}",
self.range,
(range.start_bound(), range.end_bound()),
);
let range = TextRange::from_to(start, end);
assert!(
range.is_subrange(&self.range),
"invalid slice, range: {:?}, slice: {:?}",
self.range,
range,
);
SyntaxText { node: self.node.clone(), range }
}
pub fn char_at(&self, offset: impl Into<TextUnit>) -> Option<char> {
let offset = offset.into();
let mut start: TextUnit = 0.into();
let res = self.try_for_each_chunk(|chunk| {
let end = start + TextUnit::of_str(chunk);
if start <= offset && offset < end {
let off: usize = u32::from(offset - start) as usize;
return Err(chunk[off..].chars().next().unwrap());
}
start = end;
Ok(())
});
found(res)
}
}
fn found<T>(res: Result<(), T>) -> Option<T> {
match res {
Ok(()) => None,
Err(it) => Some(it),
}
}
impl fmt::Debug for SyntaxText {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.to_string(), f)
}
}
impl fmt::Display for SyntaxText {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.try_for_each_chunk(|chunk| fmt::Display::fmt(chunk, f))
}
}
impl From<SyntaxText> for String {
fn from(text: SyntaxText) -> String {
text.to_string()
}
}
impl PartialEq<str> for SyntaxText {
fn eq(&self, mut rhs: &str) -> bool {
self.try_for_each_chunk(|chunk| {
if !rhs.starts_with(chunk) {
return Err(());
}
rhs = &rhs[chunk.len()..];
Ok(())
})
.is_ok()
}
}
impl PartialEq<&'_ str> for SyntaxText {
fn eq(&self, rhs: &&str) -> bool {
self == *rhs
}
}