Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-25 04:23:25 +00:00
commit 9e213385c9 (parent dec9bde108)

    switch to new rowan

50 changed files with 1026 additions and 1227 deletions
Cargo.lock (generated) | 16 changes
@@ -1077,8 +1077,8 @@ dependencies = [
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "ra_parser 0.1.0",
 "ra_text_edit 0.1.0",
-"rowan 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+"rowan 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"smol_str 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
 "test_utils 0.1.0",
 "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1098,7 +1098,7 @@ dependencies = [
 name = "ra_tt"
 version = "0.1.0"
 dependencies = [
-"smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+"smol_str 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -1298,12 +1298,12 @@ dependencies = [
 
 [[package]]
 name = "rowan"
-version = "0.3.3"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+"smol_str 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
 "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -1456,7 +1456,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "smol_str"
-version = "0.1.9"
+version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "serde 1.0.89 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1964,7 +1964,7 @@ dependencies = [
 "checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c"
 "checksum remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3488ba1b9a2084d38645c4c08276a1752dcbf2c7130d74f1569681ad5d2799c5"
 "checksum ron 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "17f52a24414403f81528b67488cf8edc4eda977d3af1646bb6b106a600ead78f"
-"checksum rowan 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "74d41f779e2c893339e34bebf035652c58214823cd412550111886c06632f89d"
+"checksum rowan 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "397cd19c109641f10f3c66433440285e232d8cbd37406cf8f944a15ab1e63a8e"
 "checksum rustc-demangle 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "adacaae16d02b6ec37fdc7acfcddf365978de76d1983d3ee22afc260e1ca9619"
 "checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
 "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
@@ -1983,7 +1983,7 @@ dependencies = [
 "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
 "checksum slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b3bc762e6a4b6c6fcaade73e77f9ebc6991b676f88bb2358bddb56560f073373"
 "checksum smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c4488ae950c49d403731982257768f48fada354a5203fe81f9bb6f43ca9002be"
-"checksum smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9af1035bc5d742ab6b7ab16713e41cc2ffe78cb474f6f43cd696b2d16052007e"
+"checksum smol_str 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d077b3367211e9c6e2e012fb804c444e0d80ab5a51ae4137739b58e6446dcaef"
 "checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8"
 "checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
 "checksum superslice 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f"
@@ -33,8 +33,10 @@ pub(crate) fn add_derive(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
 
 // Insert `derive` after doc comments.
 fn derive_insertion_offset(nominal: &ast::NominalDef) -> Option<TextUnit> {
-    let non_ws_child =
-        nominal.syntax().children().find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?;
+    let non_ws_child = nominal
+        .syntax()
+        .children_with_tokens()
+        .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?;
     Some(non_ws_child.range().start())
 }
 
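Note: the pattern in this hunk is the heart of the migration. In the new rowan, `children()` yields only child nodes; punctuation, keywords, comments, and whitespace are now tokens, reachable through `children_with_tokens()`, which yields a node-or-token `SyntaxElement`. A minimal sketch of the idiom, assuming the rowan-0.4-era ra_syntax API used throughout this commit (the helper name is illustrative, not from the codebase):

    use ra_syntax::{SyntaxNode, TextUnit, SyntaxKind::{COMMENT, WHITESPACE}};

    // Offset of the first non-trivia child, mirroring derive_insertion_offset above.
    fn first_non_trivia_offset(node: &SyntaxNode) -> Option<TextUnit> {
        // children_with_tokens() sees trivia tokens; plain children() would not.
        let first = node
            .children_with_tokens()
            .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?;
        Some(first.range().start())
    }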
@@ -1,7 +1,7 @@
 use join_to_string::join;
 use hir::db::HirDatabase;
 use ra_syntax::{
-    ast::{self, AstNode, AstToken, NameOwner, TypeParamsOwner},
+    ast::{self, AstNode, NameOwner, TypeParamsOwner},
     TextUnit,
 };
 
@@ -22,8 +22,10 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     buf.push_str(" ");
     buf.push_str(name.text().as_str());
     if let Some(type_params) = type_params {
-        let lifetime_params =
-            type_params.lifetime_params().filter_map(|it| it.lifetime()).map(|it| it.text());
+        let lifetime_params = type_params
+            .lifetime_params()
+            .filter_map(|it| it.lifetime_token())
+            .map(|it| it.text());
         let type_params =
             type_params.type_params().filter_map(|it| it.name()).map(|it| it.text());
         join(lifetime_params.chain(type_params)).surround_with("<", ">").to_buf(&mut buf);
@@ -1,3 +1,5 @@
+use std::fmt::Write;
+
 use crate::{Assist, AssistId, AssistCtx};
 
 use hir::Resolver;
@@ -91,8 +93,9 @@ fn add_missing_impl_members_inner(
     };
 
     let changed_range = {
-        let children = impl_item_list.syntax().children();
-        let last_whitespace = children.filter_map(ast::Whitespace::cast).last();
+        let children = impl_item_list.syntax().children_with_tokens();
+        let last_whitespace =
+            children.filter_map(|it| ast::Whitespace::cast(it.as_token()?)).last();
 
         last_whitespace.map(|w| w.syntax().range()).unwrap_or_else(|| {
             let in_brackets = impl_item_list.syntax().range().end() - TextUnit::of_str("}");
@@ -134,13 +137,13 @@ fn resolve_target_trait_def(
 fn build_func_body(def: &ast::FnDef) -> String {
     let mut buf = String::new();
 
-    for child in def.syntax().children() {
-        match (child.prev_sibling().map(|c| c.kind()), child.kind()) {
+    for child in def.syntax().children_with_tokens() {
+        match (child.prev_sibling_or_token().map(|c| c.kind()), child.kind()) {
             (_, SyntaxKind::SEMI) => buf.push_str(" { unimplemented!() }"),
             (_, SyntaxKind::ATTR) | (_, SyntaxKind::COMMENT) => {}
             (Some(SyntaxKind::ATTR), SyntaxKind::WHITESPACE)
             | (Some(SyntaxKind::COMMENT), SyntaxKind::WHITESPACE) => {}
-            _ => child.text().push_to(&mut buf),
+            _ => write!(buf, "{}", child).unwrap(),
         };
     }
 
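The `filter_map(|it| ast::Whitespace::cast(it.as_token()?))` shape above recurs across the commit: a `SyntaxElement` must first be narrowed with `as_token()` before a typed AST token can be cast from it. A hedged sketch of that narrowing (helper name invented for illustration):

    use ra_syntax::{ast, SyntaxNode, TextRange};

    // Range of the last whitespace token directly under `node`, if any.
    fn last_ws_range(node: &SyntaxNode) -> Option<TextRange> {
        let ws = node
            .children_with_tokens()
            // as_token() is None for node children, so `?` drops them;
            // Whitespace::cast then checks the token's kind.
            .filter_map(|it| ast::Whitespace::cast(it.as_token()?))
            .last()?;
        Some(ws.syntax().range())
    }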
@@ -2,8 +2,8 @@ use hir::db::HirDatabase;
 use ra_text_edit::TextEditBuilder;
 use ra_db::FileRange;
 use ra_syntax::{
-    SourceFile, TextRange, AstNode, TextUnit, SyntaxNode,
-    algo::{find_leaf_at_offset, find_node_at_offset, find_covering_node, LeafAtOffset},
+    SourceFile, TextRange, AstNode, TextUnit, SyntaxNode, SyntaxElement, SyntaxToken,
+    algo::{find_token_at_offset, find_node_at_offset, find_covering_element, TokenAtOffset},
 };
 use ra_fmt::{leading_indent, reindent};
 
@@ -104,15 +104,15 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
         Some(self.assist)
     }
 
-    pub(crate) fn leaf_at_offset(&self) -> LeafAtOffset<&'a SyntaxNode> {
-        find_leaf_at_offset(self.source_file.syntax(), self.frange.range.start())
+    pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken<'a>> {
+        find_token_at_offset(self.source_file.syntax(), self.frange.range.start())
     }
 
     pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<&'a N> {
         find_node_at_offset(self.source_file.syntax(), self.frange.range.start())
     }
-    pub(crate) fn covering_node(&self) -> &'a SyntaxNode {
-        find_covering_node(self.source_file.syntax(), self.frange.range)
+    pub(crate) fn covering_element(&self) -> SyntaxElement<'a> {
+        find_covering_element(self.source_file.syntax(), self.frange.range)
     }
 }
 
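`token_at_offset` returns a `TokenAtOffset` (the renamed `LeafAtOffset`): an offset can touch zero, one, or two tokens, because an offset on a token boundary borders both neighbours. A sketch of consuming it, assuming the enum variants shown in the extend_selection hunks further down:

    use ra_syntax::{algo::{find_token_at_offset, TokenAtOffset}, AstNode, SourceFile, TextUnit};

    fn describe(file: &SourceFile, offset: TextUnit) -> String {
        match find_token_at_offset(file.syntax(), offset) {
            TokenAtOffset::None => "no token".to_string(),
            TokenAtOffset::Single(t) => format!("inside {:?}", t.kind()),
            // Boundary offset: one token ends and the next begins exactly here.
            TokenAtOffset::Between(l, r) => format!("between {:?} and {:?}", l.kind(), r.kind()),
        }
    }

The type is also iterated directly elsewhere in this commit (`.find(...)`, `.left_biased()`), so the explicit match is only needed when the two-token case matters.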
@@ -21,7 +21,7 @@ fn collect_path_segments_raw<'a>(
 ) -> Option<usize> {
     let oldlen = segments.len();
     loop {
-        let mut children = path.syntax().children();
+        let mut children = path.syntax().children_with_tokens();
         let (first, second, third) = (
             children.next().map(|n| (n, n.kind())),
             children.next().map(|n| (n, n.kind())),
@@ -29,11 +29,11 @@ fn collect_path_segments_raw<'a>(
         );
         match (first, second, third) {
             (Some((subpath, PATH)), Some((_, COLONCOLON)), Some((segment, PATH_SEGMENT))) => {
-                path = ast::Path::cast(subpath)?;
-                segments.push(ast::PathSegment::cast(segment)?);
+                path = ast::Path::cast(subpath.as_node()?)?;
+                segments.push(ast::PathSegment::cast(segment.as_node()?)?);
             }
             (Some((segment, PATH_SEGMENT)), _, _) => {
-                segments.push(ast::PathSegment::cast(segment)?);
+                segments.push(ast::PathSegment::cast(segment.as_node()?)?);
                 break;
             }
             (_, _, _) => return None,
@@ -514,8 +514,7 @@ fn apply_auto_import<'a>(
 }
 
 pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
-    let node = ctx.covering_node();
-    let path = node.ancestors().find_map(ast::Path::cast)?;
+    let path: &ast::Path = ctx.node_at_offset()?;
     // We don't want to mess with use statements
     if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() {
         return None;
@@ -537,7 +536,7 @@ pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist
             );
         }
     } else {
-        let current_file = node.ancestors().find_map(ast::SourceFile::cast)?;
+        let current_file = path.syntax().ancestors().find_map(ast::SourceFile::cast)?;
         ctx.add_action(
             AssistId("auto_import"),
             format!("import {} in the current file", fmt_segments(&segments)),
@@ -15,13 +15,13 @@ pub(crate) fn change_visibility(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
 }
 
 fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
-    let item_keyword = ctx.leaf_at_offset().find(|leaf| match leaf.kind() {
+    let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() {
         FN_KW | MOD_KW | STRUCT_KW | ENUM_KW | TRAIT_KW => true,
         _ => false,
     });
 
     let (offset, target) = if let Some(keyword) = item_keyword {
-        let parent = keyword.parent()?;
+        let parent = keyword.parent();
         let def_kws = vec![FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF];
         // Parent is not a definition, can't add visibility
         if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) {
@@ -33,8 +33,8 @@ fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
         }
         (vis_offset(parent), keyword.range())
     } else {
-        let ident = ctx.leaf_at_offset().find(|leaf| leaf.kind() == IDENT)?;
-        let field = ident.ancestors().find_map(ast::NamedFieldDef::cast)?;
+        let ident = ctx.token_at_offset().find(|leaf| leaf.kind() == IDENT)?;
+        let field = ident.parent().ancestors().find_map(ast::NamedFieldDef::cast)?;
         if field.name()?.syntax().range() != ident.range() && field.visibility().is_some() {
             return None;
         }
@@ -51,7 +51,7 @@ fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
 }
 
 fn vis_offset(node: &SyntaxNode) -> TextUnit {
-    node.children()
+    node.children_with_tokens()
         .skip_while(|it| match it.kind() {
             WHITESPACE | COMMENT | ATTR => true,
             _ => false,
@@ -8,7 +8,7 @@ pub(crate) fn flip_binexpr(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
     let expr = ctx.node_at_offset::<BinExpr>()?;
     let lhs = expr.lhs()?.syntax();
     let rhs = expr.rhs()?.syntax();
-    let op_range = expr.op()?.range();
+    let op_range = expr.op_token()?.range();
     // The assist should be applied only if the cursor is on the operator
     let cursor_in_range = ctx.frange.range.is_subrange(&op_range);
     if !cursor_in_range {
@@ -8,13 +8,13 @@ use ra_syntax::{
 use crate::{AssistCtx, Assist, AssistId};
 
 pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
-    let comma = ctx.leaf_at_offset().find(|leaf| leaf.kind() == COMMA)?;
-    let prev = non_trivia_sibling(comma, Direction::Prev)?;
-    let next = non_trivia_sibling(comma, Direction::Next)?;
+    let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == COMMA)?;
+    let prev = non_trivia_sibling(comma.into(), Direction::Prev)?;
+    let next = non_trivia_sibling(comma.into(), Direction::Next)?;
     ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| {
         edit.target(comma.range());
-        edit.replace(prev.range(), next.text());
-        edit.replace(next.range(), prev.text());
+        edit.replace(prev.range(), next.to_string());
+        edit.replace(next.range(), prev.to_string());
     });
 
     ctx.build()
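Two small API shifts show up here: `non_trivia_sibling` now takes a `SyntaxElement`, so the comma token is converted with `.into()`, and `SyntaxElement` implements `Display` (see `write!(buf, "{}", child)` in an earlier hunk), so `to_string()` replaces `.text()` where owned text is needed. A sketch, with invented helper names:

    use ra_syntax::{SyntaxElement, SyntaxToken};

    fn as_element<'a>(token: SyntaxToken<'a>) -> SyntaxElement<'a> {
        token.into() // SyntaxToken -> SyntaxElement, as in the flip_comma calls above
    }

    fn owned_text(element: SyntaxElement) -> String {
        element.to_string() // Display-based; works for both nodes and tokens
    }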
@@ -46,8 +46,10 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
         | ExprKind::BlockExpr(_) => false,
     };
 
-    let delete_range = if let Some(whitespace) =
-        let_stmt.syntax().next_sibling().and_then(ast::Whitespace::cast)
+    let delete_range = if let Some(whitespace) = let_stmt
+        .syntax()
+        .next_sibling_or_token()
+        .and_then(|it| ast::Whitespace::cast(it.as_token()?))
     {
         TextRange::from_to(let_stmt.syntax().range().start(), whitespace.syntax().range().end())
     } else {
@@ -2,9 +2,8 @@ use test_utils::tested_by;
 use hir::db::HirDatabase;
 use ra_syntax::{
     ast::{self, AstNode},
-    SyntaxKind::{
-        WHITESPACE, MATCH_ARM, LAMBDA_EXPR, PATH_EXPR, BREAK_EXPR, LOOP_EXPR, RETURN_EXPR, COMMENT
-    }, SyntaxNode, TextUnit,
+    SyntaxNode, TextUnit,
+    SyntaxKind::{WHITESPACE, MATCH_ARM, LAMBDA_EXPR, PATH_EXPR, BREAK_EXPR, LOOP_EXPR, RETURN_EXPR, COMMENT},
 };
 
 use crate::{AssistCtx, Assist, AssistId};
@@ -13,14 +12,14 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
     if ctx.frange.range.is_empty() {
         return None;
     }
-    let node = ctx.covering_node();
+    let node = ctx.covering_element();
     if node.kind() == COMMENT {
         tested_by!(introduce_var_in_comment_is_not_applicable);
         return None;
     }
     let expr = node.ancestors().find_map(valid_target_expr)?;
     let (anchor_stmt, wrap_in_block) = anchor_stmt(expr)?;
-    let indent = anchor_stmt.prev_sibling()?;
+    let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?;
     if indent.kind() != WHITESPACE {
         return None;
     }
@@ -54,16 +53,15 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
             // We want to maintain the indent level,
             // but we do not want to duplicate possible
             // extra newlines in the indent block
-            for chunk in indent.text().chunks() {
-                if chunk.starts_with("\r\n") {
-                    buf.push_str("\r\n");
-                    buf.push_str(chunk.trim_start_matches("\r\n"));
-                } else if chunk.starts_with("\n") {
-                    buf.push_str("\n");
-                    buf.push_str(chunk.trim_start_matches("\n"));
-                } else {
-                    buf.push_str(chunk);
-                }
+            let text = indent.text();
+            if text.starts_with("\r\n") {
+                buf.push_str("\r\n");
+                buf.push_str(text.trim_start_matches("\r\n"));
+            } else if text.starts_with("\n") {
+                buf.push_str("\n");
+                buf.push_str(text.trim_start_matches("\n"));
+            } else {
+                buf.push_str(text);
             }
 
             edit.target(expr.syntax().range());
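The `chunks()` loop disappears because a node's text could span several leaves, whereas a single whitespace token's `text()` is one contiguous string, so one pass suffices. A sketch of the simplified indent copy (helper name invented):

    use ra_syntax::SyntaxToken;

    // Re-emit an indent token, normalizing at most one leading newline.
    fn push_indent(buf: &mut String, indent: SyntaxToken) {
        let text = indent.text(); // whole token text; no chunk iteration needed
        if text.starts_with("\r\n") {
            buf.push_str("\r\n");
            buf.push_str(text.trim_start_matches("\r\n"));
        } else if text.starts_with("\n") {
            buf.push_str("\n");
            buf.push_str(text.trim_start_matches("\n"));
        } else {
            buf.push_str(text);
        }
    }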
@@ -62,15 +62,15 @@ fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option<bool>
     let name_ref = path.segment()?.name_ref()?;
 
     // Make sure it is actually a dbg-macro call, dbg followed by !
-    let excl = path.syntax().next_sibling()?;
+    let excl = path.syntax().next_sibling_or_token()?;
 
     if name_ref.text() != macro_name || excl.kind() != EXCL {
         return None;
     }
 
     let node = macro_call.token_tree()?.syntax();
-    let first_child = node.first_child()?;
-    let last_child = node.last_child()?;
+    let first_child = node.first_child_or_token()?;
+    let last_child = node.last_child_or_token()?;
 
     match (first_child.kind(), last_child.kind()) {
         (L_PAREN, R_PAREN) | (L_BRACK, R_BRACK) | (L_CURLY, R_CURLY) => Some(true),
@@ -8,8 +8,8 @@ use ra_syntax::{
 use crate::{AssistCtx, Assist, AssistId};
 
 pub(crate) fn split_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
-    let colon_colon = ctx.leaf_at_offset().find(|leaf| leaf.kind() == COLONCOLON)?;
-    let path = colon_colon.parent().and_then(ast::Path::cast)?;
+    let colon_colon = ctx.token_at_offset().find(|leaf| leaf.kind() == COLONCOLON)?;
+    let path = ast::Path::cast(colon_colon.parent())?;
     let top_path = generate(Some(path), |it| it.parent_path()).last()?;
 
     let use_tree = top_path.syntax().ancestors().find_map(ast::UseTree::cast);
@@ -1,4 +1,4 @@
-use std::collections::HashSet;
+use std::{collections::HashSet, time::Instant};
 
 use ra_db::SourceDatabase;
 use ra_batch::BatchDatabase;
@@ -8,8 +8,10 @@ use ra_syntax::AstNode;
 use crate::Result;
 
 pub fn run(verbose: bool) -> Result<()> {
+    let db_load_time = Instant::now();
     let (db, roots) = BatchDatabase::load_cargo(".")?;
-    println!("Database loaded, {} roots", roots.len());
+    println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed());
+    let analysis_time = Instant::now();
     let mut num_crates = 0;
     let mut visited_modules = HashSet::new();
     let mut visit_queue = Vec::new();
@@ -96,5 +98,6 @@ pub fn run(verbose: bool) -> Result<()> {
         num_exprs_partially_unknown,
         (num_exprs_partially_unknown * 100 / num_exprs)
     );
+    println!("Analysis: {:?}", analysis_time.elapsed());
     Ok(())
 }
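The timing lines are plain `std::time::Instant` bracketing, unrelated to the rowan change itself. A self-contained example of the pattern:

    use std::time::Instant;

    fn main() {
        let load = Instant::now();
        let data: Vec<u64> = (0..1_000_000).collect(); // stand-in for loading a workspace
        println!("loaded {} items, {:?}", data.len(), load.elapsed());

        let analysis = Instant::now();
        let sum: u64 = data.iter().sum(); // stand-in for the analysis pass
        println!("analysis done (sum = {}), {:?}", sum, analysis.elapsed());
    }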
@@ -3,10 +3,8 @@ mod analysis_stats;
 use std::{fs, io::Read, path::Path, time::Instant};
 
 use clap::{App, Arg, SubCommand};
-use join_to_string::join;
-use ra_ide_api::{Analysis, FileRange};
 use ra_ide_api::file_structure;
-use ra_syntax::{SourceFile, TextRange, TreeArc, AstNode};
+use ra_syntax::{SourceFile, TreeArc, AstNode};
 use tools::collect_tests;
 use flexi_logger::Logger;
 
@@ -23,11 +21,6 @@ fn main() -> Result<()> {
         )
         .subcommand(SubCommand::with_name("parse").arg(Arg::with_name("no-dump").long("--no-dump")))
         .subcommand(SubCommand::with_name("symbols"))
-        .subcommand(
-            SubCommand::with_name("extend-selection")
-                .arg(Arg::with_name("start"))
-                .arg(Arg::with_name("end")),
-        )
         .subcommand(
             SubCommand::with_name("analysis-stats").arg(Arg::with_name("verbose").short("v")),
         )
@@ -57,13 +50,6 @@ fn main() -> Result<()> {
             let (test, tree) = render_test(file, line)?;
             println!("{}\n{}", test, tree);
         }
-        ("extend-selection", Some(matches)) => {
-            let start: u32 = matches.value_of("start").unwrap().parse()?;
-            let end: u32 = matches.value_of("end").unwrap().parse()?;
-            let text = read_stdin()?;
-            let sels = selections(text, start, end);
-            println!("{}", sels)
-        }
         ("analysis-stats", Some(matches)) => {
             let verbose = matches.is_present("verbose");
             analysis_stats::run(verbose)?;
@@ -98,22 +84,3 @@ fn render_test(file: &Path, line: usize) -> Result<(String, String)> {
     let tree = file.syntax().debug_dump();
     Ok((test.text, tree))
 }
-
-fn selections(text: String, start: u32, end: u32) -> String {
-    let (analysis, file_id) = Analysis::from_single_file(text);
-    let mut ranges = Vec::new();
-    let mut range = TextRange::from_to((start - 1).into(), (end - 1).into());
-    loop {
-        ranges.push(range);
-        let next = analysis.extend_selection(FileRange { file_id, range }).unwrap();
-        if range == next {
-            break;
-        }
-        range = next;
-    }
-    let ranges = ranges
-        .iter()
-        .map(|r| (1 + u32::from(r.start()), 1 + u32::from(r.end())))
-        .map(|(s, e)| format!("({} {})", s, e));
-    join(ranges).separator(" ").surround_with("(", ")").to_string()
-}
@@ -3,8 +3,8 @@
 use itertools::Itertools;
 use ra_syntax::{
     AstNode,
-    SyntaxNode, SyntaxKind::*,
-    ast::{self, AstToken},
+    SyntaxNode, SyntaxKind::*, SyntaxToken, SyntaxKind,
+    ast,
     algo::generate,
 };
 
@@ -15,26 +15,22 @@ pub fn reindent(text: &str, indent: &str) -> String {
 
 /// If the node is on the beginning of the line, calculate indent.
 pub fn leading_indent(node: &SyntaxNode) -> Option<&str> {
-    for leaf in prev_leaves(node) {
-        if let Some(ws) = ast::Whitespace::cast(leaf) {
+    for token in prev_tokens(node.first_token()?) {
+        if let Some(ws) = ast::Whitespace::cast(token) {
             let ws_text = ws.text();
             if let Some(pos) = ws_text.rfind('\n') {
                 return Some(&ws_text[pos + 1..]);
             }
         }
-        if leaf.leaf_text().unwrap().contains('\n') {
+        if token.text().contains('\n') {
            break;
        }
    }
    None
 }
 
-fn prev_leaves(node: &SyntaxNode) -> impl Iterator<Item = &SyntaxNode> {
-    generate(prev_leaf(node), |&node| prev_leaf(node))
-}
-
-fn prev_leaf(node: &SyntaxNode) -> Option<&SyntaxNode> {
-    generate(node.ancestors().find_map(SyntaxNode::prev_sibling), |it| it.last_child()).last()
+fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
+    generate(token.prev_token(), |&token| token.prev_token())
 }
 
 pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> {
@@ -52,20 +48,20 @@ pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> {
     Some(expr)
 }
 
-pub fn compute_ws(left: &SyntaxNode, right: &SyntaxNode) -> &'static str {
-    match left.kind() {
+pub fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str {
+    match left {
         L_PAREN | L_BRACK => return "",
         L_CURLY => {
-            if let USE_TREE = right.kind() {
+            if let USE_TREE = right {
                 return "";
             }
         }
         _ => (),
     }
-    match right.kind() {
+    match right {
         R_PAREN | R_BRACK => return "",
         R_CURLY => {
-            if let USE_TREE = left.kind() {
+            if let USE_TREE = left {
                 return "";
             }
         }
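With real token objects, walking backwards no longer needs the hand-rolled `prev_leaf` descent: `prev_token()` steps straight to the previous leaf, and `generate` lifts that into an iterator. A usage sketch built only from calls appearing in this hunk (the name `indent_of` is invented):

    use ra_syntax::{algo::generate, SyntaxKind::WHITESPACE, SyntaxNode, SyntaxToken};

    fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
        generate(token.prev_token(), |&token| token.prev_token())
    }

    // Indentation of the line a node starts on, following leading_indent above.
    fn indent_of(node: &SyntaxNode) -> Option<String> {
        for token in prev_tokens(node.first_token()?) {
            if token.kind() == WHITESPACE {
                if let Some(pos) = token.text().rfind('\n') {
                    return Some(token.text()[pos + 1..].to_string());
                }
            }
            if token.text().contains('\n') {
                break; // crossed a line boundary without finding pure indent
            }
        }
        None
    }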
@@ -726,13 +726,7 @@ impl ExprCollector {
                 self.alloc_expr(Expr::Array { exprs }, syntax_ptr)
             }
             ast::ExprKind::Literal(e) => {
-                let child = if let Some(child) = e.literal_expr() {
-                    child
-                } else {
-                    return self.alloc_expr(Expr::Missing, syntax_ptr);
-                };
-
-                let lit = match child.flavor() {
+                let lit = match e.flavor() {
                     LiteralFlavor::IntNumber { suffix } => {
                         let known_name = suffix
                             .and_then(|it| IntTy::from_suffix(&it).map(UncertainIntTy::Known));
@@ -874,9 +868,7 @@ impl ExprCollector {
     fn collect_fn_body(&mut self, node: &ast::FnDef) {
         if let Some(param_list) = node.param_list() {
             if let Some(self_param) = param_list.self_param() {
-                let self_param = SyntaxNodePtr::new(
-                    self_param.self_kw().expect("self param without self keyword").syntax(),
-                );
+                let self_param = SyntaxNodePtr::new(self_param.syntax());
                 let param_pat = self.alloc_pat(
                     Pat::Bind {
                         name: Name::self_param(),
@@ -9,7 +9,7 @@ use ra_db::{FileId, FilePosition};
 use ra_syntax::{
     SyntaxNode,
     ast::{self, AstNode, NameOwner},
-    algo::{find_node_at_offset, find_leaf_at_offset},
+    algo::{find_node_at_offset, find_token_at_offset},
 };
 
 use crate::{
@@ -155,9 +155,9 @@ pub fn trait_from_module(
 pub fn resolver_for_position(db: &impl HirDatabase, position: FilePosition) -> Resolver {
     let file_id = position.file_id;
     let file = db.parse(file_id);
-    find_leaf_at_offset(file.syntax(), position.offset)
-        .find_map(|node| {
-            node.ancestors().find_map(|node| {
+    find_token_at_offset(file.syntax(), position.offset)
+        .find_map(|token| {
+            token.parent().ancestors().find_map(|node| {
                 if ast::Expr::cast(node).is_some() || ast::Block::cast(node).is_some() {
                     if let Some(func) = function_from_child_node(db, file_id, node) {
                         let scopes = func.scopes(db);
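Tokens are leaves, so they no longer expose `ancestors()` directly; the idiom is `token.parent().ancestors()`, hopping to the covering node first. The same shape reappears in the completion hunks below. Sketch (helper name invented):

    use ra_syntax::{ast, AstNode, SyntaxToken};

    // Innermost fn definition containing the token, if any.
    fn enclosing_fn<'a>(token: SyntaxToken<'a>) -> Option<&'a ast::FnDef> {
        token.parent().ancestors().find_map(ast::FnDef::cast)
    }

Note also that `parent()` on a token in this API returns the node directly rather than an `Option` (hence the `keyword.parent()?` → `keyword.parent()` change in the change_visibility hunk), since every token hangs off some node.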
@@ -2251,14 +2251,12 @@ fn infer(content: &str) -> String {
     types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end()));
     for (syntax_ptr, ty) in &types {
         let node = syntax_ptr.to_node(&source_file);
-        write!(
-            acc,
-            "{} '{}': {}\n",
-            syntax_ptr.range(),
-            ellipsize(node.text().to_string().replace("\n", " "), 15),
-            ty.display(&db)
-        )
-        .unwrap();
+        let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node) {
+            (self_param.self_kw_token().range(), "self".to_string())
+        } else {
+            (syntax_ptr.range(), node.text().to_string().replace("\n", " "))
+        };
+        write!(acc, "{} '{}': {}\n", range, ellipsize(text, 15), ty.display(&db)).unwrap();
     }
 }
 acc.truncate(acc.trim_end().len());
@@ -13,7 +13,7 @@ mod complete_scope;
 mod complete_postfix;
 
 use ra_db::SourceDatabase;
-use ra_syntax::ast::{self, AstNode};
+use ra_syntax::{ast::{self, AstNode}, SyntaxKind::{ATTR, COMMENT}};
 
 use crate::{
     db,
@@ -76,11 +76,10 @@ pub fn function_label(node: &ast::FnDef) -> Option<String> {
         let body_range = body.syntax().range();
         let label: String = node
             .syntax()
-            .children()
+            .children_with_tokens()
             .filter(|child| !child.range().is_subrange(&body_range)) // Filter out body
-            .filter(|child| ast::Comment::cast(child).is_none()) // Filter out comments
-            .filter(|child| ast::Attr::cast(child).is_none()) // Filter out attributes
-            .map(|node| node.text().to_string())
+            .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR)) // Filter out comments and attrs
+            .map(|node| node.to_string())
             .collect();
         label
     } else {
@@ -93,10 +92,9 @@ pub fn function_label(node: &ast::FnDef) -> Option<String> {
 pub fn const_label(node: &ast::ConstDef) -> String {
     let label: String = node
         .syntax()
-        .children()
-        .filter(|child| ast::Comment::cast(child).is_none())
-        .filter(|child| ast::Attr::cast(child).is_none())
-        .map(|node| node.text().to_string())
+        .children_with_tokens()
+        .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR))
+        .map(|node| node.to_string())
         .collect();
 
     label.trim().to_owned()
@@ -105,10 +103,9 @@ pub fn const_label(node: &ast::ConstDef) -> String {
 pub fn type_label(node: &ast::TypeAliasDef) -> String {
     let label: String = node
         .syntax()
-        .children()
-        .filter(|child| ast::Comment::cast(child).is_none())
-        .filter(|child| ast::Attr::cast(child).is_none())
-        .map(|node| node.text().to_string())
+        .children_with_tokens()
+        .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR))
+        .map(|node| node.to_string())
         .collect();
 
     label.trim().to_owned()
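With tokens in the child stream, the node-only `ast::Comment::cast` / `ast::Attr::cast` filters no longer fit every element; comparing raw `kind()`s is the direct equivalent, and `to_string()` (via `Display`) replaces `text().to_string()`. A sketch of the shared shape of the three label functions above:

    use ra_syntax::{SyntaxNode, SyntaxKind::{ATTR, COMMENT}};

    // Source text of a node minus comments and attributes.
    fn label_text(node: &SyntaxNode) -> String {
        node.children_with_tokens()
            .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR))
            .map(|child| child.to_string())
            .collect()
    }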
@@ -17,7 +17,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
     }
 
     let mut params = FxHashMap::default();
-    for node in ctx.leaf.ancestors() {
+    for node in ctx.token.parent().ancestors() {
         let _ = visitor_ctx(&mut params)
             .visit::<ast::SourceFile, _>(process)
             .visit::<ast::ItemList, _>(process)
@@ -2,7 +2,7 @@ use ra_syntax::{
     algo::visit::{visitor, Visitor},
     AstNode,
     ast::{self, LoopBodyOwner},
-    SyntaxKind::*, SyntaxNode,
+    SyntaxKind::*, SyntaxToken,
 };
 
 use crate::completion::{CompletionContext, CompletionItem, Completions, CompletionKind, CompletionItemKind};
@@ -62,7 +62,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext)
         acc.add(keyword(ctx, "else", "else {$0}"));
         acc.add(keyword(ctx, "else if", "else if $0 {}"));
     }
-    if is_in_loop_body(ctx.leaf) {
+    if is_in_loop_body(ctx.token) {
         if ctx.can_be_stmt {
             acc.add(keyword(ctx, "continue", "continue;"));
             acc.add(keyword(ctx, "break", "break;"));
@@ -74,8 +74,8 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext)
     acc.add_all(complete_return(ctx, fn_def, ctx.can_be_stmt));
 }
 
-fn is_in_loop_body(leaf: &SyntaxNode) -> bool {
-    for node in leaf.ancestors() {
+fn is_in_loop_body(leaf: SyntaxToken) -> bool {
+    for node in leaf.parent().ancestors() {
         if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
             break;
         }
@@ -1,8 +1,8 @@
 use ra_text_edit::AtomTextEdit;
 use ra_syntax::{
-    AstNode, SyntaxNode, SourceFile, TextUnit, TextRange,
+    AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, SyntaxToken,
     ast,
-    algo::{find_leaf_at_offset, find_covering_node, find_node_at_offset},
+    algo::{find_token_at_offset, find_covering_element, find_node_at_offset},
     SyntaxKind::*,
 };
 use hir::{source_binder, Resolver};
@@ -15,7 +15,7 @@ use crate::{db, FilePosition};
 pub(crate) struct CompletionContext<'a> {
     pub(super) db: &'a db::RootDatabase,
     pub(super) offset: TextUnit,
-    pub(super) leaf: &'a SyntaxNode,
+    pub(super) token: SyntaxToken<'a>,
     pub(super) resolver: Resolver,
     pub(super) module: Option<hir::Module>,
     pub(super) function: Option<hir::Function>,
@@ -49,10 +49,10 @@ impl<'a> CompletionContext<'a> {
     ) -> Option<CompletionContext<'a>> {
         let resolver = source_binder::resolver_for_position(db, position);
         let module = source_binder::module_from_position(db, position);
-        let leaf = find_leaf_at_offset(original_file.syntax(), position.offset).left_biased()?;
+        let token = find_token_at_offset(original_file.syntax(), position.offset).left_biased()?;
         let mut ctx = CompletionContext {
             db,
-            leaf,
+            token,
             offset: position.offset,
             resolver,
             module,
@@ -76,9 +76,9 @@ impl<'a> CompletionContext<'a> {
 
     // The range of the identifier that is being completed.
     pub(crate) fn source_range(&self) -> TextRange {
-        match self.leaf.kind() {
+        match self.token.kind() {
             // workaroud when completion is triggered by trigger characters.
-            IDENT => self.leaf.range(),
+            IDENT => self.token.range(),
             _ => TextRange::offset_len(self.offset, 0.into()),
         }
     }
@@ -139,10 +139,11 @@ impl<'a> CompletionContext<'a> {
             _ => (),
         }
 
-        self.use_item_syntax = self.leaf.ancestors().find_map(ast::UseItem::cast);
+        self.use_item_syntax = self.token.parent().ancestors().find_map(ast::UseItem::cast);
 
         self.function_syntax = self
-            .leaf
+            .token
+            .parent()
             .ancestors()
             .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
             .find_map(ast::FnDef::cast);
@@ -224,8 +225,7 @@ impl<'a> CompletionContext<'a> {
 }
 
 fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> {
-    let node = find_covering_node(syntax, range);
-    node.ancestors().find_map(N::cast)
+    find_covering_element(syntax, range).ancestors().find_map(N::cast)
 }
 
 fn is_node<N: AstNode>(node: &SyntaxNode) -> bool {
@@ -106,8 +106,10 @@ fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(
     single_use_tree: &ast::UseTree,
 ) -> Option<TextEdit> {
     let use_tree_list_node = single_use_tree.syntax().parent()?;
-    if single_use_tree.path()?.segment()?.syntax().first_child()?.kind() == SyntaxKind::SELF_KW {
-        let start = use_tree_list_node.prev_sibling()?.range().start();
+    if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind()
+        == SyntaxKind::SELF_KW
+    {
+        let start = use_tree_list_node.prev_sibling_or_token()?.range().start();
         let end = use_tree_list_node.range().end();
         let range = TextRange::from_to(start, end);
         let mut edit_builder = TextEditBuilder::default();
@@ -1,8 +1,9 @@
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    Direction, SyntaxNode, TextRange, TextUnit, AstNode,
-    algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset},
-    SyntaxKind::*,
+    Direction, SyntaxNode, TextRange, TextUnit, AstNode, SyntaxElement,
+    algo::{find_covering_element, find_token_at_offset, TokenAtOffset},
+    SyntaxKind::*, SyntaxToken,
+    ast::Comment,
 };
 
 use crate::{FileRange, db::RootDatabase};
|
@ -32,53 +33,58 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
|
||||||
|
|
||||||
if range.is_empty() {
|
if range.is_empty() {
|
||||||
let offset = range.start();
|
let offset = range.start();
|
||||||
let mut leaves = find_leaf_at_offset(root, offset);
|
let mut leaves = find_token_at_offset(root, offset);
|
||||||
if leaves.clone().all(|it| it.kind() == WHITESPACE) {
|
if leaves.clone().all(|it| it.kind() == WHITESPACE) {
|
||||||
return Some(extend_ws(root, leaves.next()?, offset));
|
return Some(extend_ws(root, leaves.next()?, offset));
|
||||||
}
|
}
|
||||||
let leaf_range = match leaves {
|
let leaf_range = match leaves {
|
||||||
LeafAtOffset::None => return None,
|
TokenAtOffset::None => return None,
|
||||||
LeafAtOffset::Single(l) => {
|
TokenAtOffset::Single(l) => {
|
||||||
if string_kinds.contains(&l.kind()) {
|
if string_kinds.contains(&l.kind()) {
|
||||||
extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range())
|
extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range())
|
||||||
} else {
|
} else {
|
||||||
l.range()
|
l.range()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
LeafAtOffset::Between(l, r) => pick_best(l, r).range(),
|
TokenAtOffset::Between(l, r) => pick_best(l, r).range(),
|
||||||
};
|
};
|
||||||
return Some(leaf_range);
|
return Some(leaf_range);
|
||||||
};
|
};
|
||||||
let node = find_covering_node(root, range);
|
let node = match find_covering_element(root, range) {
|
||||||
|
SyntaxElement::Token(token) => {
|
||||||
|
if token.range() != range {
|
||||||
|
return Some(token.range());
|
||||||
|
}
|
||||||
|
if let Some(comment) = Comment::cast(token) {
|
||||||
|
if let Some(range) = extend_comments(comment) {
|
||||||
|
return Some(range);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
token.parent()
|
||||||
|
}
|
||||||
|
SyntaxElement::Node(node) => node,
|
||||||
|
};
|
||||||
|
if node.range() != range {
|
||||||
|
return Some(node.range());
|
||||||
|
}
|
||||||
|
|
||||||
// Using shallowest node with same range allows us to traverse siblings.
|
// Using shallowest node with same range allows us to traverse siblings.
|
||||||
let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap();
|
let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap();
|
||||||
|
|
||||||
if range == node.range() {
|
if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
|
||||||
if string_kinds.contains(&node.kind()) {
|
if let Some(range) = extend_list_item(node) {
|
||||||
if let Some(range) = extend_comments(node) {
|
return Some(range);
|
||||||
return Some(range);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
|
|
||||||
if let Some(range) = extend_list_item(node) {
|
|
||||||
return Some(range);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
match node.ancestors().skip_while(|n| n.range() == range).next() {
|
node.parent().map(|it| it.range())
|
||||||
None => None,
|
|
||||||
Some(parent) => Some(parent.range()),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extend_single_word_in_comment_or_string(
|
fn extend_single_word_in_comment_or_string(
|
||||||
leaf: &SyntaxNode,
|
leaf: SyntaxToken,
|
||||||
offset: TextUnit,
|
offset: TextUnit,
|
||||||
) -> Option<TextRange> {
|
) -> Option<TextRange> {
|
||||||
let text: &str = leaf.leaf_text()?;
|
let text: &str = leaf.text();
|
||||||
let cursor_position: u32 = (offset - leaf.range().start()).into();
|
let cursor_position: u32 = (offset - leaf.range().start()).into();
|
||||||
|
|
||||||
let (before, after) = text.split_at(cursor_position as usize);
|
let (before, after) = text.split_at(cursor_position as usize);
|
||||||
|
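`find_covering_element` can land on a token (when the range fits inside one leaf) or on a node; the match above handles the two cases separately before falling through to node-based extension. A reduced sketch of that dispatch, under the same API assumptions (helper name invented):

    use ra_syntax::{algo::find_covering_element, SyntaxElement, SyntaxNode, TextRange};

    // Smallest node that covers `range`, stepping up from a token if needed.
    fn covering_node<'a>(root: &'a SyntaxNode, range: TextRange) -> &'a SyntaxNode {
        match find_covering_element(root, range) {
            SyntaxElement::Token(token) => token.parent(),
            SyntaxElement::Node(node) => node,
        }
    }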
@@ -101,14 +107,14 @@ fn extend_single_word_in_comment_or_string(
     }
 }
 
-fn extend_ws(root: &SyntaxNode, ws: &SyntaxNode, offset: TextUnit) -> TextRange {
-    let ws_text = ws.leaf_text().unwrap();
+fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange {
+    let ws_text = ws.text();
     let suffix = TextRange::from_to(offset, ws.range().end()) - ws.range().start();
     let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start();
     let ws_suffix = &ws_text.as_str()[suffix];
     let ws_prefix = &ws_text.as_str()[prefix];
     if ws_text.contains('\n') && !ws_suffix.contains('\n') {
-        if let Some(node) = ws.next_sibling() {
+        if let Some(node) = ws.next_sibling_or_token() {
             let start = match ws_prefix.rfind('\n') {
                 Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32),
                 None => node.range().start(),
@@ -124,9 +130,9 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange
     ws.range()
 }
 
-fn pick_best<'a>(l: &'a SyntaxNode, r: &'a SyntaxNode) -> &'a SyntaxNode {
+fn pick_best<'a>(l: SyntaxToken<'a>, r: SyntaxToken<'a>) -> SyntaxToken<'a> {
     return if priority(r) > priority(l) { r } else { l };
-    fn priority(n: &SyntaxNode) -> usize {
+    fn priority(n: SyntaxToken) -> usize {
         match n.kind() {
             WHITESPACE => 0,
             IDENT | SELF_KW | SUPER_KW | CRATE_KW | LIFETIME => 2,
@@ -137,54 +143,60 @@ fn pick_best<'a>(l: &'a SyntaxNode, r: &'a SyntaxNode) -> &'a SyntaxNode {
 
 /// Extend list item selection to include nearby comma and whitespace.
 fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
-    fn is_single_line_ws(node: &SyntaxNode) -> bool {
-        node.kind() == WHITESPACE && !node.leaf_text().unwrap().contains('\n')
+    fn is_single_line_ws(node: &SyntaxToken) -> bool {
+        node.kind() == WHITESPACE && !node.text().contains('\n')
     }
 
-    fn nearby_comma(node: &SyntaxNode, dir: Direction) -> Option<&SyntaxNode> {
-        node.siblings(dir)
+    fn nearby_comma(node: &SyntaxNode, dir: Direction) -> Option<SyntaxToken> {
+        node.siblings_with_tokens(dir)
             .skip(1)
-            .skip_while(|node| is_single_line_ws(node))
+            .skip_while(|node| match node {
+                SyntaxElement::Node(_) => false,
+                SyntaxElement::Token(it) => is_single_line_ws(it),
+            })
             .next()
+            .and_then(|it| it.as_token())
             .filter(|node| node.kind() == COMMA)
     }
 
     if let Some(comma_node) = nearby_comma(node, Direction::Prev) {
         return Some(TextRange::from_to(comma_node.range().start(), node.range().end()));
     }
 
     if let Some(comma_node) = nearby_comma(node, Direction::Next) {
         // Include any following whitespace when the comma is after the list item.
         let final_node = comma_node
-            .siblings(Direction::Next)
-            .skip(1)
-            .next()
+            .next_sibling_or_token()
+            .and_then(|it| it.as_token())
             .filter(|node| is_single_line_ws(node))
             .unwrap_or(comma_node);
 
         return Some(TextRange::from_to(node.range().start(), final_node.range().end()));
     }
 
-    return None;
+    None
 }
 
-fn extend_comments(node: &SyntaxNode) -> Option<TextRange> {
-    let prev = adj_comments(node, Direction::Prev);
-    let next = adj_comments(node, Direction::Next);
+fn extend_comments(comment: Comment) -> Option<TextRange> {
+    let prev = adj_comments(comment, Direction::Prev);
+    let next = adj_comments(comment, Direction::Next);
     if prev != next {
-        Some(TextRange::from_to(prev.range().start(), next.range().end()))
+        Some(TextRange::from_to(prev.syntax().range().start(), next.syntax().range().end()))
     } else {
         None
     }
 }
 
-fn adj_comments(node: &SyntaxNode, dir: Direction) -> &SyntaxNode {
-    let mut res = node;
-    for node in node.siblings(dir) {
-        match node.kind() {
-            COMMENT => res = node,
-            WHITESPACE if !node.leaf_text().unwrap().as_str().contains("\n\n") => (),
-            _ => break,
+fn adj_comments(comment: Comment, dir: Direction) -> Comment {
+    let mut res = comment;
+    for element in comment.syntax().siblings_with_tokens(dir) {
+        let token = match element.as_token() {
+            None => break,
+            Some(token) => token,
+        };
+        if let Some(c) = Comment::cast(token) {
+            res = c
+        } else if token.kind() != WHITESPACE || token.text().contains("\n\n") {
+            break;
         }
     }
     res
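A note on the recurring pattern in the hunks above: after the rowan upgrade, sibling iteration yields a mix of nodes and tokens, so token-only logic (like "skip single-line whitespace") has to match on the element kind first. The following is a minimal self-contained sketch of that idiom, not the ra_syntax API — `Element`, `nearby_comma`, and the string kinds are stand-ins invented for illustration.

    // Toy model: find the index of a COMMA token among siblings,
    // looking through single-line whitespace but stopping at anything else.
    #[derive(Debug, Clone, Copy, PartialEq)]
    enum Element {
        Node(&'static str),                // kind
        Token(&'static str, &'static str), // kind, text
    }

    impl Element {
        fn as_token(&self) -> Option<(&'static str, &'static str)> {
            match *self {
                Element::Token(kind, text) => Some((kind, text)),
                Element::Node(_) => None,
            }
        }
    }

    fn nearby_comma(siblings: &[Element]) -> Option<usize> {
        siblings
            .iter()
            .enumerate()
            .skip(1) // skip the element itself
            .skip_while(|(_, el)| match el.as_token() {
                // single-line whitespace is transparent; anything else stops the scan
                Some(("WHITESPACE", text)) => !text.contains('\n'),
                _ => false,
            })
            .next()
            .and_then(|(i, el)| el.as_token().map(|(kind, _)| (i, kind)))
            .filter(|&(_, kind)| kind == "COMMA")
            .map(|(i, _)| i)
    }

    fn main() {
        let siblings = [
            Element::Node("TUPLE_FIELD"),
            Element::Token("WHITESPACE", " "),
            Element::Token("COMMA", ","),
        ];
        assert_eq!(nearby_comma(&siblings), Some(2));
    }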
@@ -308,23 +320,13 @@ fn bar(){}
 /*
 foo
 _bar1<|>*/
 "#,
             &["_bar1", "/*\nfoo\n_bar1*/"],
         );
 
-        do_check(
-            r#"
-//!<|>foo_2 bar
-"#,
-            &["foo_2", "//!foo_2 bar"],
-        );
+        do_check(r#"//!<|>foo_2 bar"#, &["foo_2", "//!foo_2 bar"]);
 
-        do_check(
-            r#"
-/<|>/foo bar
-"#,
-            &["//foo bar"],
-        );
+        do_check(r#"/<|>/foo bar"#, &["//foo bar"]);
     }
 
     #[test]
@@ -332,13 +334,13 @@ _bar1<|>*/
         do_check(
             r#"
 fn main() { foo<|>+bar;}
 "#,
             &["foo", "foo+bar"],
         );
         do_check(
             r#"
 fn main() { foo+<|>bar;}
 "#,
             &["bar", "foo+bar"],
         );
     }
@@ -355,11 +357,11 @@ fn main() { foo+<|>bar;}
         do_check(
             r#"
 impl S {
     fn foo() {
         // hel<|>lo world
-        }
     }
-    "#,
+}
+"#,
             &["hello", "// hello world"],
         );
     }
@@ -371,7 +373,7 @@ impl S {
 fn bar(){}
 
 " fn f<|>oo() {"
 "#,
             &["foo", "\" fn foo() {\""],
         );
     }
@@ -1,9 +1,9 @@
 use rustc_hash::FxHashSet;
 
 use ra_syntax::{
-    AstNode, Direction, SourceFile, SyntaxNode, TextRange,
+    AstNode, SourceFile, SyntaxNode, TextRange, Direction, SyntaxElement,
     SyntaxKind::{self, *},
-    ast::{self, VisibilityOwner},
+    ast::{self, VisibilityOwner, Comment},
 };
 
 #[derive(Debug, PartialEq, Eq)]
@@ -26,34 +26,49 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
     let mut visited_imports = FxHashSet::default();
     let mut visited_mods = FxHashSet::default();
 
-    for node in file.syntax().descendants() {
+    for element in file.syntax().descendants_with_tokens() {
         // Fold items that span multiple lines
-        if let Some(kind) = fold_kind(node.kind()) {
-            if node.text().contains('\n') {
-                res.push(Fold { range: node.range(), kind });
+        if let Some(kind) = fold_kind(element.kind()) {
+            let is_multiline = match element {
+                SyntaxElement::Node(node) => node.text().contains('\n'),
+                SyntaxElement::Token(token) => token.text().contains('\n'),
+            };
+            if is_multiline {
+                res.push(Fold { range: element.range(), kind });
+                continue;
             }
         }
 
-        // Fold groups of comments
-        if node.kind() == COMMENT && !visited_comments.contains(&node) {
-            if let Some(range) = contiguous_range_for_comment(node, &mut visited_comments) {
-                res.push(Fold { range, kind: FoldKind::Comment })
-            }
-        }
-
-        // Fold groups of imports
-        if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
-            if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) {
-                res.push(Fold { range, kind: FoldKind::Imports })
-            }
-        }
-
-        // Fold groups of mods
-        if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node) {
-            if let Some(range) =
-                contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods)
-            {
-                res.push(Fold { range, kind: FoldKind::Mods })
-            }
-        }
+        match element {
+            SyntaxElement::Token(token) => {
+                // Fold groups of comments
+                if let Some(comment) = ast::Comment::cast(token) {
+                    if !visited_comments.contains(&comment) {
+                        if let Some(range) =
+                            contiguous_range_for_comment(comment, &mut visited_comments)
+                        {
+                            res.push(Fold { range, kind: FoldKind::Comment })
+                        }
+                    }
+                }
+            }
+            SyntaxElement::Node(node) => {
+                // Fold groups of imports
+                if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
+                    if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) {
+                        res.push(Fold { range, kind: FoldKind::Imports })
+                    }
+                }
+
+                // Fold groups of mods
+                if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node)
+                {
+                    if let Some(range) =
+                        contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods)
+                    {
+                        res.push(Fold { range, kind: FoldKind::Mods })
+                    }
+                }
+            }
+        }
     }
@@ -90,16 +105,21 @@ fn contiguous_range_for_group_unless<'a>(
     visited.insert(first);
 
     let mut last = first;
-    for node in first.siblings(Direction::Next) {
-        if let Some(ws) = ast::Whitespace::cast(node) {
-            // There is a blank line, which means that the group ends here
-            if ws.count_newlines_lazy().take(2).count() == 2 {
+    for element in first.siblings_with_tokens(Direction::Next) {
+        let node = match element {
+            SyntaxElement::Token(token) => {
+                if let Some(ws) = ast::Whitespace::cast(token) {
+                    if !ws.spans_multiple_lines() {
+                        // Ignore whitespace without blank lines
+                        continue;
+                    }
+                }
+                // There is a blank line or another token, which means that the
+                // group ends here
                 break;
             }
-
-            // Ignore whitespace without blank lines
-            continue;
-        }
+            SyntaxElement::Node(node) => node,
+        };
 
         // Stop if we find a node that doesn't belong to the group
         if node.kind() != first.kind() || unless(node) {
@@ -119,40 +139,42 @@ fn contiguous_range_for_group_unless<'a>(
 }
 
 fn contiguous_range_for_comment<'a>(
-    first: &'a SyntaxNode,
-    visited: &mut FxHashSet<&'a SyntaxNode>,
+    first: Comment<'a>,
+    visited: &mut FxHashSet<Comment<'a>>,
 ) -> Option<TextRange> {
     visited.insert(first);
 
     // Only fold comments of the same flavor
-    let group_flavor = ast::Comment::cast(first)?.flavor();
+    let group_flavor = first.flavor();
 
     let mut last = first;
-    for node in first.siblings(Direction::Next) {
-        if let Some(ws) = ast::Whitespace::cast(node) {
-            // There is a blank line, which means the group ends here
-            if ws.count_newlines_lazy().take(2).count() == 2 {
+    for element in first.syntax().siblings_with_tokens(Direction::Next) {
+        match element {
+            SyntaxElement::Token(token) => {
+                if let Some(ws) = ast::Whitespace::cast(token) {
+                    if !ws.spans_multiple_lines() {
+                        // Ignore whitespace without blank lines
+                        continue;
+                    }
+                }
+                if let Some(c) = Comment::cast(token) {
+                    if c.flavor() == group_flavor {
+                        visited.insert(c);
+                        last = c;
+                        continue;
+                    }
+                }
+                // The comment group ends because either:
+                // * An element of a different kind was reached
+                // * A comment of a different flavor was reached
                 break;
             }
-
-            // Ignore whitespace without blank lines
-            continue;
-        }
-
-        match ast::Comment::cast(node) {
-            Some(next_comment) if next_comment.flavor() == group_flavor => {
-                visited.insert(node);
-                last = node;
-            }
-            // The comment group ends because either:
-            // * An element of a different kind was reached
-            // * A comment of a different flavor was reached
-            _ => break,
-        }
+            SyntaxElement::Node(_) => break,
+        };
     }
 
     if first != last {
-        Some(TextRange::from_to(first.range().start(), last.range().end()))
+        Some(TextRange::from_to(first.syntax().range().start(), last.syntax().range().end()))
     } else {
         // The group consists of only one element, therefore it cannot be folded
         None
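The grouping rule the new fold code implements is: comments stay in one group while only single-line whitespace separates them, and a blank line (or any other element) ends the group. Below is a rough standalone model of that rule over plain strings rather than syntax tokens — `comment_groups` and its line-based representation are invented for illustration only.

    // Return (first, last) index pairs of foldable runs of `//` comment lines.
    fn comment_groups(lines: &[&str]) -> Vec<(usize, usize)> {
        let mut groups = Vec::new();
        let mut cur: Option<(usize, usize)> = None; // (first, last)
        for (i, line) in lines.iter().enumerate() {
            if line.trim_start().starts_with("//") {
                cur = match cur {
                    // adjacent comment line: extend the current group
                    Some((first, last)) if i == last + 1 => Some((first, i)),
                    // anything in between (blank line or code) ended the group
                    Some(group) => {
                        groups.push(group);
                        Some((i, i))
                    }
                    None => Some((i, i)),
                };
            }
        }
        if let Some(group) = cur {
            groups.push(group);
        }
        // only groups of more than one comment are worth folding
        groups.retain(|&(first, last)| last > first);
        groups
    }

    fn main() {
        let text = ["// a", "// b", "", "// c", "fn main() {}", "// d", "// e"];
        assert_eq!(comment_groups(&text), vec![(0, 1), (5, 6)]);
    }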
@@ -1,7 +1,7 @@
 use ra_db::SourceDatabase;
 use ra_syntax::{
     AstNode, SyntaxNode, TreeArc, ast::{self, NameOwner, VisibilityOwner, TypeAscriptionOwner},
-    algo::{find_covering_node, find_node_at_offset, find_leaf_at_offset, visit::{visitor, Visitor}},
+    algo::{find_covering_element, find_node_at_offset, find_token_at_offset, visit::{visitor, Visitor}},
 };
 use hir::HirDisplay;
 
@@ -104,8 +104,11 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
     }
 
     if range.is_none() {
-        let node = find_leaf_at_offset(file.syntax(), position.offset).find_map(|leaf| {
-            leaf.ancestors().find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())
+        let node = find_token_at_offset(file.syntax(), position.offset).find_map(|token| {
+            token
+                .parent()
+                .ancestors()
+                .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())
         })?;
         let frange = FileRange { file_id: position.file_id, range: node.range() };
         res.extend(type_of(db, frange).map(rust_code_markup));
@@ -123,13 +126,12 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
 pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
     let file = db.parse(frange.file_id);
     let syntax = file.syntax();
-    let leaf_node = find_covering_node(syntax, frange.range);
+    let leaf_node = find_covering_element(syntax, frange.range);
     // if we picked identifier, expand to pattern/expression
     let node = leaf_node
         .ancestors()
         .take_while(|it| it.range() == leaf_node.range())
-        .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())
-        .unwrap_or(leaf_node);
+        .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())?;
     let parent_fn = node.ancestors().find_map(ast::FnDef::cast)?;
     let function = hir::source_binder::function_from_source(db, frange.file_id, parent_fn)?;
     let infer = function.infer(db);
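The `type_of` change above keeps the "expand to the tightest interesting ancestor" idiom: walk up from the covering element, but only through ancestors occupying exactly the same text range, so a selection widens from, say, an identifier to the expression it spells and no further. A standalone sketch of that idiom follows; `Range`, `Node`, and `expand_to_expr` are simplified stand-ins, not the ra_syntax types.

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Range { start: u32, end: u32 }

    struct Node { range: Range, is_expr: bool }

    // `ancestors` is ordered from the leaf upward.
    fn expand_to_expr(ancestors: &[Node]) -> Option<&Node> {
        let leaf_range = ancestors.first()?.range;
        ancestors
            .iter()
            .take_while(|n| n.range == leaf_range) // same extent only
            .find(|n| n.is_expr)                   // first "interesting" ancestor
    }

    fn main() {
        // IDENT -> PATH -> PATH_EXPR all span 0..3; the enclosing block spans 0..10.
        let ancestors = [
            Node { range: Range { start: 0, end: 3 }, is_expr: false },
            Node { range: Range { start: 0, end: 3 }, is_expr: false },
            Node { range: Range { start: 0, end: 3 }, is_expr: true },
            Node { range: Range { start: 0, end: 10 }, is_expr: true },
        ];
        assert_eq!(expand_to_expr(&ancestors).map(|n| n.range.end), Some(3));
    }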
@@ -1,8 +1,8 @@
 use itertools::Itertools;
 use ra_syntax::{
-    SourceFile, TextRange, TextUnit, AstNode, SyntaxNode,
+    SourceFile, TextRange, TextUnit, AstNode, SyntaxNode, SyntaxElement, SyntaxToken,
     SyntaxKind::{self, WHITESPACE, COMMA, R_CURLY, R_PAREN, R_BRACK},
-    algo::{find_covering_node, non_trivia_sibling},
+    algo::{find_covering_element, non_trivia_sibling},
     ast,
     Direction,
 };
@@ -24,22 +24,22 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
         range
     };
 
-    let node = find_covering_node(file.syntax(), range);
+    let node = match find_covering_element(file.syntax(), range) {
+        SyntaxElement::Node(node) => node,
+        SyntaxElement::Token(token) => token.parent(),
+    };
     let mut edit = TextEditBuilder::default();
-    for node in node.descendants() {
-        let text = match node.leaf_text() {
-            Some(text) => text,
-            None => continue,
-        };
-        let range = match range.intersection(&node.range()) {
+    for token in node.descendants_with_tokens().filter_map(|it| it.as_token()) {
+        let range = match range.intersection(&token.range()) {
             Some(range) => range,
             None => continue,
-        } - node.range().start();
+        } - token.range().start();
+        let text = token.text();
         for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
             let pos: TextUnit = (pos as u32).into();
-            let off = node.range().start() + range.start() + pos;
+            let off = token.range().start() + range.start() + pos;
             if !edit.invalidates_offset(off) {
-                remove_newline(&mut edit, node, text.as_str(), off);
+                remove_newline(&mut edit, token, off);
             }
         }
     }
@@ -47,17 +47,12 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
     edit.finish()
 }
 
-fn remove_newline(
-    edit: &mut TextEditBuilder,
-    node: &SyntaxNode,
-    node_text: &str,
-    offset: TextUnit,
-) {
-    if node.kind() != WHITESPACE || node_text.bytes().filter(|&b| b == b'\n').count() != 1 {
+fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUnit) {
+    if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
         // The node is either the first or the last in the file
-        let suff = &node_text[TextRange::from_to(
-            offset - node.range().start() + TextUnit::of_char('\n'),
-            TextUnit::of_str(node_text),
+        let suff = &token.text()[TextRange::from_to(
+            offset - token.range().start() + TextUnit::of_char('\n'),
+            TextUnit::of_str(token.text()),
         )];
         let spaces = suff.bytes().take_while(|&b| b == b' ').count();
 
@@ -74,7 +69,7 @@ fn remove_newline(
     // ```
     //
     // into `my_function(<some-expr>)`
-    if join_single_expr_block(edit, node).is_some() {
+    if join_single_expr_block(edit, token).is_some() {
         return;
     }
     // ditto for
@@ -84,44 +79,50 @@ fn remove_newline(
     // bar
     // };
     // ```
-    if join_single_use_tree(edit, node).is_some() {
+    if join_single_use_tree(edit, token).is_some() {
         return;
     }
 
     // The node is between two other nodes
-    let prev = node.prev_sibling().unwrap();
-    let next = node.next_sibling().unwrap();
+    let prev = token.prev_sibling_or_token().unwrap();
+    let next = token.next_sibling_or_token().unwrap();
     if is_trailing_comma(prev.kind(), next.kind()) {
         // Removes: trailing comma, newline (incl. surrounding whitespace)
-        edit.delete(TextRange::from_to(prev.range().start(), node.range().end()));
+        edit.delete(TextRange::from_to(prev.range().start(), token.range().end()));
     } else if prev.kind() == COMMA && next.kind() == R_CURLY {
         // Removes: comma, newline (incl. surrounding whitespace)
-        let space = if let Some(left) = prev.prev_sibling() { compute_ws(left, next) } else { " " };
+        let space = if let Some(left) = prev.prev_sibling_or_token() {
+            compute_ws(left.kind(), next.kind())
+        } else {
+            " "
+        };
         edit.replace(
-            TextRange::from_to(prev.range().start(), node.range().end()),
+            TextRange::from_to(prev.range().start(), token.range().end()),
             space.to_string(),
         );
-    } else if let (Some(_), Some(next)) = (ast::Comment::cast(prev), ast::Comment::cast(next)) {
+    } else if let (Some(_), Some(next)) =
+        (prev.as_token().and_then(ast::Comment::cast), next.as_token().and_then(ast::Comment::cast))
+    {
         // Removes: newline (incl. surrounding whitespace), start of the next comment
         edit.delete(TextRange::from_to(
-            node.range().start(),
+            token.range().start(),
             next.syntax().range().start() + TextUnit::of_str(next.prefix()),
         ));
     } else {
         // Remove newline but add a computed amount of whitespace characters
-        edit.replace(node.range(), compute_ws(prev, next).to_string());
+        edit.replace(token.range(), compute_ws(prev.kind(), next.kind()).to_string());
     }
 }
 
 fn has_comma_after(node: &SyntaxNode) -> bool {
-    match non_trivia_sibling(node, Direction::Next) {
+    match non_trivia_sibling(node.into(), Direction::Next) {
         Some(n) => n.kind() == COMMA,
         _ => false,
     }
 }
 
-fn join_single_expr_block(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> {
-    let block = ast::Block::cast(node.parent()?)?;
+fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> {
+    let block = ast::Block::cast(token.parent())?;
     let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
     let expr = extract_trivial_expression(block)?;
 
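Note that `compute_ws` is now called with the two surrounding *kinds* rather than the nodes themselves: once tokens live in the tree, the replacement whitespace can be decided from kinds alone. The snippet below is a hypothetical stand-in for that helper (its variants and rules are invented for illustration), showing only the shape of the kind-based decision.

    enum Kind { LCurly, RCurly, Dot, Other }

    // Decide what replaces a removed newline, given the neighbouring kinds.
    fn compute_ws(left: Kind, right: Kind) -> &'static str {
        match (left, right) {
            (Kind::LCurly, _) | (_, Kind::RCurly) => " ", // keep `{ x }` spacing
            (_, Kind::Dot) => "",                         // join method chains tightly
            _ => " ",
        }
    }

    fn main() {
        assert_eq!(compute_ws(Kind::Other, Kind::Dot), "");
        assert_eq!(compute_ws(Kind::LCurly, Kind::Other), " ");
    }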
@@ -140,8 +141,8 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Opti
     Some(())
 }
 
-fn join_single_use_tree(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> {
-    let use_tree_list = ast::UseTreeList::cast(node.parent()?)?;
+fn join_single_use_tree(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> {
+    let use_tree_list = ast::UseTreeList::cast(token.parent())?;
     let (tree,) = use_tree_list.use_trees().collect_tuple()?;
     edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string());
     Some(())
@@ -401,13 +402,13 @@ use ra_syntax::{
             r"
 use ra_syntax::{
     algo::<|>{
-        find_leaf_at_offset,
+        find_token_at_offset,
     },
     ast,
 };",
             r"
 use ra_syntax::{
-    algo::<|>find_leaf_at_offset,
+    algo::<|>find_token_at_offset,
     ast,
 };",
         );
@@ -1,6 +1,6 @@
 use ra_syntax::{
     SourceFile, TextUnit,
-    algo::find_leaf_at_offset,
+    algo::find_token_at_offset,
     SyntaxKind::{self, *},
     ast::AstNode,
 };
@@ -8,15 +8,15 @@ use ra_syntax::{
 pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
     const BRACES: &[SyntaxKind] =
         &[L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE];
-    let (brace_node, brace_idx) = find_leaf_at_offset(file.syntax(), offset)
+    let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset)
         .filter_map(|node| {
             let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
             Some((node, idx))
         })
         .next()?;
-    let parent = brace_node.parent()?;
+    let parent = brace_node.parent();
     let matching_kind = BRACES[brace_idx ^ 1];
-    let matching_node = parent.children().find(|node| node.kind() == matching_kind)?;
+    let matching_node = parent.children_with_tokens().find(|node| node.kind() == matching_kind)?;
     Some(matching_node.range().start())
 }
 
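The `BRACES[brace_idx ^ 1]` lookup above relies on the table listing each opening brace immediately before its closing partner, so flipping the low bit of the index maps one to the other. A self-contained illustration of the same trick over plain chars:

    const BRACES: &[char] = &['{', '}', '[', ']', '(', ')', '<', '>'];

    fn matching(brace: char) -> Option<char> {
        let idx = BRACES.iter().position(|&b| b == brace)?;
        Some(BRACES[idx ^ 1]) // 0<->1, 2<->3, 4<->5, 6<->7
    }

    fn main() {
        assert_eq!(matching('['), Some(']'));
        assert_eq!(matching(')'), Some('('));
        assert_eq!(matching('x'), None);
    }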
@@ -41,5 +41,4 @@ mod tests {
 
         do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }");
     }
-
 }
@@ -1,6 +1,6 @@
 use rustc_hash::FxHashSet;
 
-use ra_syntax::{ast, AstNode, TextRange, Direction, SyntaxKind::*};
+use ra_syntax::{ast, AstNode, TextRange, Direction, SyntaxKind::*, SyntaxElement};
 use ra_db::SourceDatabase;
 
 use crate::{FileId, db::RootDatabase};
@@ -15,9 +15,9 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
     let source_file = db.parse(file_id);
 
     // Visited nodes to handle highlighting priorities
-    let mut highlighted = FxHashSet::default();
+    let mut highlighted: FxHashSet<SyntaxElement> = FxHashSet::default();
     let mut res = Vec::new();
-    for node in source_file.syntax().descendants() {
+    for node in source_file.syntax().descendants_with_tokens() {
         if highlighted.contains(&node) {
             continue;
         }
@@ -31,14 +31,14 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
             LIFETIME => "parameter",
             k if k.is_keyword() => "keyword",
             _ => {
-                if let Some(macro_call) = ast::MacroCall::cast(node) {
+                if let Some(macro_call) = node.as_node().and_then(ast::MacroCall::cast) {
                     if let Some(path) = macro_call.path() {
                         if let Some(segment) = path.segment() {
                             if let Some(name_ref) = segment.name_ref() {
-                                highlighted.insert(name_ref.syntax());
+                                highlighted.insert(name_ref.syntax().into());
                                 let range_start = name_ref.syntax().range().start();
                                 let mut range_end = name_ref.syntax().range().end();
-                                for sibling in path.syntax().siblings(Direction::Next) {
+                                for sibling in path.syntax().siblings_with_tokens(Direction::Next) {
                                     match sibling.kind() {
                                         EXCL | IDENT => range_end = sibling.range().end(),
                                         _ => (),
@@ -1,8 +1,9 @@
 use ra_db::SourceDatabase;
 use crate::db::RootDatabase;
 use ra_syntax::{
-    SourceFile, SyntaxNode, TextRange, AstNode,
-    algo::{self, visit::{visitor, Visitor}}, ast::{self, AstToken}
+    SourceFile, TextRange, AstNode, SyntaxToken, SyntaxElement,
+    algo,
+    SyntaxKind::{STRING, RAW_STRING},
 };
 
 pub use ra_db::FileId;
@@ -14,11 +15,15 @@ pub(crate) fn syntax_tree(
 ) -> String {
     if let Some(text_range) = text_range {
         let file = db.parse(file_id);
-        let node = algo::find_covering_node(file.syntax(), text_range);
-        if let Some(tree) = syntax_tree_for_string(node, text_range) {
-            return tree;
-        }
+        let node = match algo::find_covering_element(file.syntax(), text_range) {
+            SyntaxElement::Node(node) => node,
+            SyntaxElement::Token(token) => {
+                if let Some(tree) = syntax_tree_for_string(token, text_range) {
+                    return tree;
+                }
+                token.parent()
+            }
+        };
 
         node.debug_dump()
     } else {
@@ -28,19 +33,19 @@ pub(crate) fn syntax_tree(
 
 /// Attempts parsing the selected contents of a string literal
 /// as rust syntax and returns its syntax tree
-fn syntax_tree_for_string(node: &SyntaxNode, text_range: TextRange) -> Option<String> {
+fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<String> {
     // When the range is inside a string
     // we'll attempt parsing it as rust syntax
     // to provide the syntax tree of the contents of the string
-    visitor()
-        .visit(|node: &ast::String| syntax_tree_for_token(node, text_range))
-        .visit(|node: &ast::RawString| syntax_tree_for_token(node, text_range))
-        .accept(node)?
+    match token.kind() {
+        STRING | RAW_STRING => syntax_tree_for_token(token, text_range),
+        _ => None,
+    }
 }
 
-fn syntax_tree_for_token<T: AstToken>(node: &T, text_range: TextRange) -> Option<String> {
+fn syntax_tree_for_token(node: SyntaxToken, text_range: TextRange) -> Option<String> {
     // Range of the full node
-    let node_range = node.syntax().range();
+    let node_range = node.range();
     let text = node.text().to_string();
 
     // We start at some point inside the node
@@ -1,8 +1,8 @@
 use ra_syntax::{
     AstNode, SourceFile, SyntaxKind::*,
-    SyntaxNode, TextUnit, TextRange,
-    algo::{find_node_at_offset, find_leaf_at_offset, LeafAtOffset},
-    ast::{self, AstToken},
+    TextUnit, TextRange, SyntaxToken,
+    algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
+    ast::{self},
 };
 use ra_fmt::leading_indent;
 use ra_text_edit::{TextEdit, TextEditBuilder};
@@ -11,11 +11,11 @@ use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
 
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
     let file = db.parse(position.file_id);
-    let comment = find_leaf_at_offset(file.syntax(), position.offset)
+    let comment = find_token_at_offset(file.syntax(), position.offset)
         .left_biased()
         .and_then(ast::Comment::cast)?;
 
-    if let ast::CommentFlavor::Multiline = comment.flavor() {
+    if comment.flavor() == ast::CommentFlavor::Multiline {
         return None;
     }
 
@@ -41,23 +41,23 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
     )
 }
 
-fn node_indent<'a>(file: &'a SourceFile, node: &SyntaxNode) -> Option<&'a str> {
-    let ws = match find_leaf_at_offset(file.syntax(), node.range().start()) {
-        LeafAtOffset::Between(l, r) => {
-            assert!(r == node);
+fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str> {
+    let ws = match find_token_at_offset(file.syntax(), token.range().start()) {
+        TokenAtOffset::Between(l, r) => {
+            assert!(r == token);
             l
         }
-        LeafAtOffset::Single(n) => {
-            assert!(n == node);
+        TokenAtOffset::Single(n) => {
+            assert!(n == token);
             return Some("");
         }
-        LeafAtOffset::None => unreachable!(),
+        TokenAtOffset::None => unreachable!(),
     };
     if ws.kind() != WHITESPACE {
         return None;
     }
-    let text = ws.leaf_text().unwrap();
-    let pos = text.as_str().rfind('\n').map(|it| it + 1).unwrap_or(0);
+    let text = ws.text();
+    let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
     Some(&text[pos..])
 }
 
@@ -88,7 +88,7 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<
     let file = db.parse(position.file_id);
     assert_eq!(file.syntax().text().char_at(position.offset), Some('.'));
 
-    let whitespace = find_leaf_at_offset(file.syntax(), position.offset)
+    let whitespace = find_token_at_offset(file.syntax(), position.offset)
        .left_biased()
        .and_then(ast::Whitespace::cast)?;
 
@@ -100,7 +100,7 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<
     let current_indent_len = TextUnit::of_str(current_indent);
 
     // Make sure dot is a part of call chain
-    let field_expr = whitespace.syntax().parent().and_then(ast::FieldExpr::cast)?;
+    let field_expr = ast::FieldExpr::cast(whitespace.syntax().parent())?;
     let prev_indent = leading_indent(field_expr.syntax())?;
     let target_indent = format!("    {}", prev_indent);
     let target_indent_len = TextUnit::of_str(&target_indent);
@@ -1,6 +1,6 @@
 use ra_parser::{TokenSource, TreeSink, ParseError};
 use ra_syntax::{
-    AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc,
+    AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement,
     ast, SyntaxKind::*, TextUnit
 };
 
@@ -47,8 +47,8 @@ fn convert_tt(
     global_offset: TextUnit,
     tt: &SyntaxNode,
 ) -> Option<tt::Subtree> {
-    let first_child = tt.first_child()?;
-    let last_child = tt.last_child()?;
+    let first_child = tt.first_child_or_token()?;
+    let last_child = tt.last_child_or_token()?;
     let delimiter = match (first_child.kind(), last_child.kind()) {
         (L_PAREN, R_PAREN) => tt::Delimiter::Parenthesis,
         (L_CURLY, R_CURLY) => tt::Delimiter::Brace,
@@ -56,39 +56,47 @@ fn convert_tt(
         _ => return None,
     };
     let mut token_trees = Vec::new();
-    for child in tt.children().skip(1) {
+    for child in tt.children_with_tokens().skip(1) {
         if child == first_child || child == last_child || child.kind().is_trivia() {
             continue;
         }
-        if child.kind().is_punct() {
-            let mut prev = None;
-            for char in child.leaf_text().unwrap().chars() {
-                if let Some(char) = prev {
-                    token_trees.push(
-                        tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Joint }).into(),
-                    );
-                }
-                prev = Some(char)
-            }
-            if let Some(char) = prev {
-                token_trees
-                    .push(tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Alone }).into());
-            }
-        } else {
-            let child: tt::TokenTree = if child.kind() == TOKEN_TREE {
-                convert_tt(token_map, global_offset, child)?.into()
-            } else if child.kind().is_keyword() || child.kind() == IDENT {
-                let relative_range = child.range() - global_offset;
-                let id = token_map.alloc(relative_range);
-                let text = child.leaf_text().unwrap().clone();
-                tt::Leaf::from(tt::Ident { text, id }).into()
-            } else if child.kind().is_literal() {
-                tt::Leaf::from(tt::Literal { text: child.leaf_text().unwrap().clone() }).into()
-            } else {
-                return None;
-            };
-            token_trees.push(child)
-        }
+        match child {
+            SyntaxElement::Token(token) => {
+                if token.kind().is_punct() {
+                    let mut prev = None;
+                    for char in token.text().chars() {
+                        if let Some(char) = prev {
+                            token_trees.push(
+                                tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Joint })
+                                    .into(),
+                            );
+                        }
+                        prev = Some(char)
+                    }
+                    if let Some(char) = prev {
+                        token_trees.push(
+                            tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Alone }).into(),
+                        );
+                    }
+                } else {
+                    let child = if token.kind().is_keyword() || token.kind() == IDENT {
+                        let relative_range = token.range() - global_offset;
+                        let id = token_map.alloc(relative_range);
+                        let text = token.text().clone();
+                        tt::Leaf::from(tt::Ident { text, id }).into()
+                    } else if token.kind().is_literal() {
+                        tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
+                    } else {
+                        return None;
+                    };
+                    token_trees.push(child);
+                }
+            }
+            SyntaxElement::Node(node) => {
+                let child = convert_tt(token_map, global_offset, node)?.into();
+                token_trees.push(child);
+            }
+        };
     }
 
     let res = tt::Subtree { delimiter, token_trees };
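The punctuation handling above splits a multi-char punct token into one `Punct` per char, marking every char `Joint` except the last, which is `Alone`. Here is a standalone sketch of exactly that prev-tracking loop; `Spacing` and `split_punct` are simplified stand-ins for the `tt` types.

    #[derive(Debug, PartialEq)]
    enum Spacing { Joint, Alone }

    fn split_punct(text: &str) -> Vec<(char, Spacing)> {
        let mut out = Vec::new();
        let mut prev: Option<char> = None;
        for c in text.chars() {
            if let Some(p) = prev {
                out.push((p, Spacing::Joint)); // a following char exists: Joint
            }
            prev = Some(c);
        }
        if let Some(p) = prev {
            out.push((p, Spacing::Alone)); // the last char stands alone
        }
        out
    }

    fn main() {
        assert_eq!(
            split_punct("=>"),
            vec![('=', Spacing::Joint), ('>', Spacing::Alone)]
        );
    }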
@@ -118,12 +126,12 @@ impl TtTokenSource {
     }
     fn convert_tt(&mut self, tt: &tt::TokenTree) {
         match tt {
-            tt::TokenTree::Leaf(leaf) => self.convert_leaf(leaf),
+            tt::TokenTree::Leaf(token) => self.convert_token(token),
             tt::TokenTree::Subtree(sub) => self.convert_subtree(sub),
         }
     }
-    fn convert_leaf(&mut self, leaf: &tt::Leaf) {
-        let tok = match leaf {
+    fn convert_token(&mut self, token: &tt::Leaf) {
+        let tok = match token {
             tt::Leaf::Literal(l) => TtToken {
                 kind: SyntaxKind::INT_NUMBER, // FIXME
                 is_joint_to_next: false,
@@ -206,7 +214,7 @@ impl<'a> TtTreeSink<'a> {
 }
 
 impl<'a> TreeSink for TtTreeSink<'a> {
-    fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8) {
+    fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
         for _ in 0..n_tokens {
             self.buf += self.tokens[self.token_pos].text.as_str();
             self.token_pos += 1;
@@ -214,15 +222,15 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         self.text_pos += TextUnit::of_str(&self.buf);
         let text = SmolStr::new(self.buf.as_str());
         self.buf.clear();
-        self.inner.leaf(kind, text)
+        self.inner.token(kind, text)
     }
 
-    fn start_branch(&mut self, kind: SyntaxKind) {
-        self.inner.start_branch(kind);
+    fn start_node(&mut self, kind: SyntaxKind) {
+        self.inner.start_node(kind);
     }
 
-    fn finish_branch(&mut self) {
-        self.inner.finish_branch();
+    fn finish_node(&mut self) {
+        self.inner.finish_node();
     }
 
     fn error(&mut self, error: ParseError) {
@@ -116,12 +116,12 @@ pub(super) fn process(sink: &mut dyn TreeSink, mut events: Vec<Event>) {
             }
 
             for kind in forward_parents.drain(..).rev() {
-                sink.start_branch(kind);
+                sink.start_node(kind);
             }
         }
-        Event::Finish => sink.finish_branch(),
+        Event::Finish => sink.finish_node(),
         Event::Token { kind, n_raw_tokens } => {
-            sink.leaf(kind, n_raw_tokens);
+            sink.token(kind, n_raw_tokens);
         }
        Event::Error { msg } => sink.error(msg),
     }
@@ -40,15 +40,15 @@ pub trait TokenSource {
 
 /// `TreeSink` abstracts details of a particular syntax tree implementation.
 pub trait TreeSink {
-    /// Adds new leaf to the current branch.
-    fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8);
+    /// Adds new token to the current branch.
+    fn token(&mut self, kind: SyntaxKind, n_tokens: u8);
 
     /// Start new branch and make it current.
-    fn start_branch(&mut self, kind: SyntaxKind);
+    fn start_node(&mut self, kind: SyntaxKind);
 
     /// Finish current branch and restore previous
     /// branch as current.
-    fn finish_branch(&mut self);
+    fn finish_node(&mut self);
 
     fn error(&mut self, error: ParseError);
 }
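To see the renamed `start_node`/`token`/`finish_node` protocol in action, here is a toy implementation of a trait with roughly the shape shown above — `SyntaxKind` and `ParseError` are stubbed, so this is a sketch of the event protocol, not the real ra_parser types.

    type SyntaxKind = &'static str;
    type ParseError = String;

    trait TreeSink {
        fn token(&mut self, kind: SyntaxKind, n_tokens: u8);
        fn start_node(&mut self, kind: SyntaxKind);
        fn finish_node(&mut self);
        fn error(&mut self, error: ParseError);
    }

    // A sink that just prints the tree-building events with indentation.
    struct Printer { depth: usize }

    impl TreeSink for Printer {
        fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
            println!("{}{} x{}", "  ".repeat(self.depth), kind, n_tokens);
        }
        fn start_node(&mut self, kind: SyntaxKind) {
            println!("{}{} {{", "  ".repeat(self.depth), kind);
            self.depth += 1;
        }
        fn finish_node(&mut self) {
            self.depth -= 1;
            println!("{}}}", "  ".repeat(self.depth));
        }
        fn error(&mut self, error: ParseError) {
            println!("error: {}", error);
        }
    }

    fn main() {
        let mut sink = Printer { depth: 0 };
        sink.start_node("FN_DEF");
        sink.token("FN_KW", 1);
        sink.token("IDENT", 1);
        sink.finish_node();
    }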
@@ -13,7 +13,7 @@ unicode-xid = "0.1.0"
 itertools = "0.8.0"
 drop_bomb = "0.1.4"
 parking_lot = "0.7.0"
-rowan = "0.3.3"
+rowan = "0.4.0"
 
 # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here
 # to reduce number of compilations
@@ -1,18 +1,14 @@
 pub mod visit;
 
-use rowan::TransparentNewType;
+use crate::{SyntaxNode, TextRange, TextUnit, AstNode, Direction, SyntaxToken, SyntaxElement};
 
-use crate::{SyntaxNode, TextRange, TextUnit, AstNode, Direction};
+pub use rowan::TokenAtOffset;
 
-pub use rowan::LeafAtOffset;
-
-pub fn find_leaf_at_offset(node: &SyntaxNode, offset: TextUnit) -> LeafAtOffset<&SyntaxNode> {
-    match node.0.leaf_at_offset(offset) {
-        LeafAtOffset::None => LeafAtOffset::None,
-        LeafAtOffset::Single(n) => LeafAtOffset::Single(SyntaxNode::from_repr(n)),
-        LeafAtOffset::Between(l, r) => {
-            LeafAtOffset::Between(SyntaxNode::from_repr(l), SyntaxNode::from_repr(r))
-        }
+pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> {
+    match node.0.token_at_offset(offset) {
+        TokenAtOffset::None => TokenAtOffset::None,
+        TokenAtOffset::Single(n) => TokenAtOffset::Single(n.into()),
+        TokenAtOffset::Between(l, r) => TokenAtOffset::Between(l.into(), r.into()),
     }
 }
 
@@ -26,16 +22,29 @@ pub fn find_leaf_at_offset(node: &SyntaxNode, offset: TextUnit) -> LeafAtOffset<
 ///
 /// then the left node will be silently preferred.
 pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<&N> {
-    find_leaf_at_offset(syntax, offset).find_map(|leaf| leaf.ancestors().find_map(N::cast))
+    find_token_at_offset(syntax, offset)
+        .find_map(|leaf| leaf.parent().ancestors().find_map(N::cast))
 }
 
 /// Finds the first sibling in the given direction which is not `trivia`
-pub fn non_trivia_sibling(node: &SyntaxNode, direction: Direction) -> Option<&SyntaxNode> {
-    node.siblings(direction).skip(1).find(|node| !node.kind().is_trivia())
+pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option<SyntaxElement> {
+    return match element {
+        SyntaxElement::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
+        SyntaxElement::Token(token) => {
+            token.siblings_with_tokens(direction).skip(1).find(not_trivia)
+        }
+    };
+
+    fn not_trivia(element: &SyntaxElement) -> bool {
+        match element {
+            SyntaxElement::Node(_) => true,
+            SyntaxElement::Token(token) => !token.kind().is_trivia(),
+        }
+    }
 }
 
-pub fn find_covering_node(root: &SyntaxNode, range: TextRange) -> &SyntaxNode {
-    SyntaxNode::from_repr(root.0.covering_node(range))
+pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement {
+    root.0.covering_node(range).into()
 }
 
 // Replace with `std::iter::successors` in `1.34.0`
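The re-exported `TokenAtOffset` captures the three cases an offset query can hit: no token at all, exactly one token, or the boundary between two adjacent tokens. A standalone model of those semantics over (start, end) ranges, with invented helper names:

    #[derive(Debug, PartialEq)]
    enum TokenAtOffset<T> {
        None,
        Single(T),
        Between(T, T),
    }

    // Tokens are (start, end) ranges; an offset on a shared boundary hits two.
    fn token_at_offset(tokens: &[(u32, u32)], offset: u32) -> TokenAtOffset<usize> {
        let hits: Vec<usize> = tokens
            .iter()
            .enumerate()
            .filter(|(_, &(start, end))| start <= offset && offset <= end)
            .map(|(i, _)| i)
            .collect();
        match hits.as_slice() {
            [] => TokenAtOffset::None,
            [i] => TokenAtOffset::Single(*i),
            [l, r, ..] => TokenAtOffset::Between(*l, *r),
        }
    }

    fn main() {
        // two adjacent tokens: "fn" at 0..2 and " " at 2..3
        let tokens = [(0, 2), (2, 3)];
        assert_eq!(token_at_offset(&tokens, 1), TokenAtOffset::Single(0));
        assert_eq!(token_at_offset(&tokens, 2), TokenAtOffset::Between(0, 1));
    }

This is also why the callers above have a `left_biased()` to pick one side of a `Between` deterministically.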
@@ -7,7 +7,7 @@ use itertools::Itertools;
 
 pub use self::generated::*;
 use crate::{
-    syntax_node::{SyntaxNode, SyntaxNodeChildren, TreeArc, RaTypes},
+    syntax_node::{SyntaxNode, SyntaxNodeChildren, TreeArc, RaTypes, SyntaxToken, SyntaxElement, SyntaxElementChildren},
     SmolStr,
     SyntaxKind::*,
 };
@@ -27,7 +27,8 @@ pub trait AstNode:
 
 pub trait AstToken: AstNode {
     fn text(&self) -> &SmolStr {
-        self.syntax().leaf_text().unwrap()
+        // self.syntax().leaf_text().unwrap()
+        unimplemented!()
     }
 }
 
@@ -126,8 +127,8 @@ pub trait AttrsOwner: AstNode {
 }
 
 pub trait DocCommentsOwner: AstNode {
-    fn doc_comments(&self) -> AstChildren<Comment> {
-        children(self)
+    fn doc_comments(&self) -> CommentIter {
+        CommentIter { iter: self.syntax().children_with_tokens() }
     }
 
     /// Returns the textual content of a doc comment block as a single string.
@@ -179,9 +180,9 @@ impl Attr {
 
     pub fn as_atom(&self) -> Option<SmolStr> {
         let tt = self.value()?;
-        let (_bra, attr, _ket) = tt.syntax().children().collect_tuple()?;
+        let (_bra, attr, _ket) = tt.syntax().children_with_tokens().collect_tuple()?;
         if attr.kind() == IDENT {
-            Some(attr.leaf_text().unwrap().clone())
+            Some(attr.as_token()?.text().clone())
         } else {
             None
         }
@@ -189,10 +190,10 @@ impl Attr {
 
     pub fn as_call(&self) -> Option<(SmolStr, &TokenTree)> {
         let tt = self.value()?;
-        let (_bra, attr, args, _ket) = tt.syntax().children().collect_tuple()?;
-        let args = TokenTree::cast(args)?;
+        let (_bra, attr, args, _ket) = tt.syntax().children_with_tokens().collect_tuple()?;
+        let args = TokenTree::cast(args.as_node()?)?;
         if attr.kind() == IDENT {
-            Some((attr.leaf_text().unwrap().clone(), args))
+            Some((attr.as_token()?.text().clone(), args))
         } else {
             None
         }
@@ -200,16 +201,35 @@ impl Attr {
 
     pub fn as_named(&self) -> Option<SmolStr> {
         let tt = self.value()?;
-        let attr = tt.syntax().children().nth(1)?;
+        let attr = tt.syntax().children_with_tokens().nth(1)?;
         if attr.kind() == IDENT {
-            Some(attr.leaf_text().unwrap().clone())
+            Some(attr.as_token()?.text().clone())
         } else {
             None
         }
     }
 }
 
-impl Comment {
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Comment<'a>(SyntaxToken<'a>);
+
+impl<'a> Comment<'a> {
+    pub fn cast(token: SyntaxToken<'a>) -> Option<Self> {
+        if token.kind() == COMMENT {
+            Some(Comment(token))
+        } else {
+            None
+        }
+    }
+
+    pub fn syntax(&self) -> SyntaxToken<'a> {
+        self.0
+    }
+
+    pub fn text(&self) -> &'a SmolStr {
+        self.0.text()
+    }
+
     pub fn flavor(&self) -> CommentFlavor {
         let text = self.text();
         if text.starts_with("///") {
@@ -230,13 +250,16 @@ impl Comment {
     pub fn prefix(&self) -> &'static str {
         self.flavor().prefix()
     }
+}
 
-    pub fn count_newlines_lazy(&self) -> impl Iterator<Item = &()> {
-        self.text().chars().filter(|&c| c == '\n').map(|_| &())
-    }
+pub struct CommentIter<'a> {
+    iter: SyntaxElementChildren<'a>,
+}
 
-    pub fn has_newlines(&self) -> bool {
-        self.count_newlines_lazy().count() > 0
+impl<'a> Iterator for CommentIter<'a> {
+    type Item = Comment<'a>;
+    fn next(&mut self) -> Option<Comment<'a>> {
+        self.iter.by_ref().find_map(|el| el.as_token().and_then(Comment::cast))
     }
 }
 
@@ -267,27 +290,42 @@ impl CommentFlavor {
     }
 }
 
-impl Whitespace {
-    pub fn count_newlines_lazy(&self) -> impl Iterator<Item = &()> {
-        self.text().chars().filter(|&c| c == '\n').map(|_| &())
+pub struct Whitespace<'a>(SyntaxToken<'a>);
+
+impl<'a> Whitespace<'a> {
+    pub fn cast(token: SyntaxToken<'a>) -> Option<Self> {
+        if token.kind() == WHITESPACE {
+            Some(Whitespace(token))
+        } else {
+            None
+        }
     }
 
-    pub fn has_newlines(&self) -> bool {
-        self.text().contains('\n')
+    pub fn syntax(&self) -> SyntaxToken<'a> {
+        self.0
+    }
+
+    pub fn text(&self) -> &'a SmolStr {
+        self.0.text()
+    }
+
+    pub fn spans_multiple_lines(&self) -> bool {
+        let text = self.text();
+        text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n'))
     }
 }
 
 impl Name {
     pub fn text(&self) -> &SmolStr {
-        let ident = self.syntax().first_child().unwrap();
-        ident.leaf_text().unwrap()
+        let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap();
+        ident.text()
     }
 }
 
 impl NameRef {
     pub fn text(&self) -> &SmolStr {
-        let ident = self.syntax().first_child().unwrap();
-        ident.leaf_text().unwrap()
+        let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap();
+        ident.text()
     }
 }
 
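Note that `spans_multiple_lines` above effectively asks for *two* newlines — i.e., a blank line — by finding the first '\n' and searching the remainder for another, which is what the fold and group code relies on. A standalone copy of that logic over a plain &str:

    fn spans_multiple_lines(text: &str) -> bool {
        text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n'))
    }

    fn main() {
        assert!(!spans_multiple_lines("   "));
        assert!(!spans_multiple_lines("  \n  ")); // single newline: same group
        assert!(spans_multiple_lines("  \n\n  ")); // blank line: group boundary
    }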
@@ -316,7 +354,7 @@ impl ImplBlock {
 
 impl Module {
     pub fn has_semi(&self) -> bool {
-        match self.syntax().last_child() {
+        match self.syntax().last_child_or_token() {
             None => false,
             Some(node) => node.kind() == SEMI,
         }
@@ -325,7 +363,7 @@ impl Module {
 
 impl LetStmt {
     pub fn has_semi(&self) -> bool {
-        match self.syntax().last_child() {
+        match self.syntax().last_child_or_token() {
             None => false,
             Some(node) => node.kind() == SEMI,
         }
@@ -360,7 +398,7 @@ impl IfExpr {
 
 impl ExprStmt {
     pub fn has_semi(&self) -> bool {
-        match self.syntax().last_child() {
+        match self.syntax().last_child_or_token() {
             None => false,
             Some(node) => node.kind() == SEMI,
         }
@@ -384,7 +422,7 @@ impl PathSegment {
         let res = if let Some(name_ref) = self.name_ref() {
             PathSegmentKind::Name(name_ref)
         } else {
-            match self.syntax().first_child()?.kind() {
+            match self.syntax().first_child_or_token()?.kind() {
                 SELF_KW => PathSegmentKind::SelfKw,
                 SUPER_KW => PathSegmentKind::SuperKw,
                 CRATE_KW => PathSegmentKind::CrateKw,
@@ -395,7 +433,7 @@ impl PathSegment {
     }
 
     pub fn has_colon_colon(&self) -> bool {
-        match self.syntax.first_child().map(|s| s.kind()) {
+        match self.syntax.first_child_or_token().map(|s| s.kind()) {
             Some(COLONCOLON) => true,
             _ => false,
         }
@@ -410,7 +448,7 @@ impl Path {
 
 impl UseTree {
     pub fn has_star(&self) -> bool {
-        self.syntax().children().any(|it| it.kind() == STAR)
+        self.syntax().children_with_tokens().any(|it| it.kind() == STAR)
     }
 }
 
@@ -425,7 +463,7 @@ impl UseTreeList {
 
 impl RefPat {
     pub fn is_mut(&self) -> bool {
-        self.syntax().children().any(|n| n.kind() == MUT_KW)
+        self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW)
     }
 }
 
@ -500,19 +538,19 @@ impl EnumVariant {
|
||||||
|
|
||||||
impl PointerType {
|
impl PointerType {
|
||||||
pub fn is_mut(&self) -> bool {
|
pub fn is_mut(&self) -> bool {
|
||||||
self.syntax().children().any(|n| n.kind() == MUT_KW)
|
self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ReferenceType {
|
impl ReferenceType {
|
||||||
pub fn is_mut(&self) -> bool {
|
pub fn is_mut(&self) -> bool {
|
||||||
self.syntax().children().any(|n| n.kind() == MUT_KW)
|
self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RefExpr {
|
impl RefExpr {
|
||||||
pub fn is_mut(&self) -> bool {
|
pub fn is_mut(&self) -> bool {
|
||||||
self.syntax().children().any(|n| n.kind() == MUT_KW)
|
self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -528,7 +566,7 @@ pub enum PrefixOp {
|
||||||
|
|
||||||
impl PrefixExpr {
|
impl PrefixExpr {
|
||||||
pub fn op_kind(&self) -> Option<PrefixOp> {
|
pub fn op_kind(&self) -> Option<PrefixOp> {
|
||||||
match self.syntax().first_child()?.kind() {
|
match self.op_token()?.kind() {
|
||||||
STAR => Some(PrefixOp::Deref),
|
STAR => Some(PrefixOp::Deref),
|
||||||
EXCL => Some(PrefixOp::Not),
|
EXCL => Some(PrefixOp::Not),
|
||||||
MINUS => Some(PrefixOp::Neg),
|
MINUS => Some(PrefixOp::Neg),
|
||||||
|
@ -536,8 +574,8 @@ impl PrefixExpr {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn op(&self) -> Option<&SyntaxNode> {
|
pub fn op_token(&self) -> Option<SyntaxToken> {
|
||||||
self.syntax().first_child()
|
self.syntax().first_child_or_token()?.as_token()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
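The accessors above all make the same move: with the new rowan, keywords and punctuation are tokens rather than nodes, so `children()`/`first_child()` become `children_with_tokens()`/`first_child_or_token()` plus a token filter. Below is a minimal self-contained sketch of that filtering pattern; the `Kind`, `Node`, `Token`, and `Element` types are illustrative stand-ins, not rowan's actual API.

// Stand-in kinds for the sketch; the real SyntaxKind is generated.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Kind { MutKw, Amp, Ident }

#[derive(Debug)]
struct Token { kind: Kind }
#[derive(Debug)]
struct Node { kind: Kind }

// Children are a mix of nodes and tokens, like rowan's SyntaxElement.
#[derive(Debug)]
enum Element { Node(Node), Token(Token) }

impl Element {
    fn kind(&self) -> Kind {
        match self {
            Element::Node(n) => n.kind,
            Element::Token(t) => t.kind,
        }
    }
}

fn is_mut(children: &[Element]) -> bool {
    // Mirrors `self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW)`:
    // keyword tokens are no longer nodes, so a token-aware iterator is required.
    children.iter().any(|el| el.kind() == Kind::MutKw)
}

fn main() {
    let children = vec![
        Element::Token(Token { kind: Kind::Amp }),
        Element::Token(Token { kind: Kind::MutKw }),
        Element::Node(Node { kind: Kind::Ident }),
    ];
    assert!(is_mut(&children));
    println!("MUT_KW found among the mixed node/token children");
}

The point of the stand-in `Element` enum is the same as rowan's `SyntaxElement`: one iterator yields both nodes and tokens, and callers pick out the tokens they care about by kind.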
@@ -608,40 +646,42 @@ pub enum BinOp {
 }

 impl BinExpr {
-    fn op_details(&self) -> Option<(&SyntaxNode, BinOp)> {
-        self.syntax().children().find_map(|c| match c.kind() {
+    fn op_details(&self) -> Option<(SyntaxToken, BinOp)> {
+        self.syntax().children_with_tokens().filter_map(|it| it.as_token()).find_map(|c| {
+            match c.kind() {
             PIPEPIPE => Some((c, BinOp::BooleanOr)),
             AMPAMP => Some((c, BinOp::BooleanAnd)),
             EQEQ => Some((c, BinOp::EqualityTest)),
             NEQ => Some((c, BinOp::NegatedEqualityTest)),
             LTEQ => Some((c, BinOp::LesserEqualTest)),
             GTEQ => Some((c, BinOp::GreaterEqualTest)),
             L_ANGLE => Some((c, BinOp::LesserTest)),
             R_ANGLE => Some((c, BinOp::GreaterTest)),
             PLUS => Some((c, BinOp::Addition)),
             STAR => Some((c, BinOp::Multiplication)),
             MINUS => Some((c, BinOp::Subtraction)),
             SLASH => Some((c, BinOp::Division)),
             PERCENT => Some((c, BinOp::Remainder)),
             SHL => Some((c, BinOp::LeftShift)),
             SHR => Some((c, BinOp::RightShift)),
             CARET => Some((c, BinOp::BitwiseXor)),
             PIPE => Some((c, BinOp::BitwiseOr)),
             AMP => Some((c, BinOp::BitwiseAnd)),
             DOTDOT => Some((c, BinOp::RangeRightOpen)),
             DOTDOTEQ => Some((c, BinOp::RangeRightClosed)),
             EQ => Some((c, BinOp::Assignment)),
             PLUSEQ => Some((c, BinOp::AddAssign)),
             SLASHEQ => Some((c, BinOp::DivAssign)),
             STAREQ => Some((c, BinOp::MulAssign)),
             PERCENTEQ => Some((c, BinOp::RemAssign)),
             SHREQ => Some((c, BinOp::ShrAssign)),
             SHLEQ => Some((c, BinOp::ShlAssign)),
             MINUSEQ => Some((c, BinOp::SubAssign)),
             PIPEEQ => Some((c, BinOp::BitOrAssign)),
             AMPEQ => Some((c, BinOp::BitAndAssign)),
             CARETEQ => Some((c, BinOp::BitXorAssign)),
             _ => None,
+            }
         })
     }

@@ -649,7 +689,7 @@ impl BinExpr {
         self.op_details().map(|t| t.1)
     }

-    pub fn op(&self) -> Option<&SyntaxNode> {
+    pub fn op_token(&self) -> Option<SyntaxToken> {
         self.op_details().map(|t| t.0)
     }

@@ -680,11 +720,23 @@ pub enum SelfParamFlavor {
 }

 impl SelfParam {
+    pub fn self_kw_token(&self) -> SyntaxToken {
+        self.syntax()
+            .children_with_tokens()
+            .filter_map(|it| it.as_token())
+            .find(|it| it.kind() == SELF_KW)
+            .expect("invalid tree: self param must have self")
+    }
+
     pub fn flavor(&self) -> SelfParamFlavor {
-        let borrowed = self.syntax().children().any(|n| n.kind() == AMP);
+        let borrowed = self.syntax().children_with_tokens().any(|n| n.kind() == AMP);
         if borrowed {
             // check for a `mut` coming after the & -- `mut &self` != `&mut self`
-            if self.syntax().children().skip_while(|n| n.kind() != AMP).any(|n| n.kind() == MUT_KW)
+            if self
+                .syntax()
+                .children_with_tokens()
+                .skip_while(|n| n.kind() != AMP)
+                .any(|n| n.kind() == MUT_KW)
             {
                 SelfParamFlavor::MutRef
             } else {
@@ -707,25 +759,31 @@ pub enum LiteralFlavor {
     Bool,
 }

-impl LiteralExpr {
+impl Literal {
+    pub fn token(&self) -> SyntaxToken {
+        match self.syntax().first_child_or_token().unwrap() {
+            SyntaxElement::Token(token) => token,
+            _ => unreachable!(),
+        }
+    }
+
     pub fn flavor(&self) -> LiteralFlavor {
-        let syntax = self.syntax();
-        match syntax.kind() {
+        match self.token().kind() {
             INT_NUMBER => {
                 let allowed_suffix_list = [
                     "isize", "i128", "i64", "i32", "i16", "i8", "usize", "u128", "u64", "u32",
                     "u16", "u8",
                 ];
-                let text = syntax.text().to_string();
+                let text = self.token().text().to_string();
                 let suffix = allowed_suffix_list
                     .iter()
                     .find(|&s| text.ends_with(s))
                     .map(|&suf| SmolStr::new(suf));
-                LiteralFlavor::IntNumber { suffix: suffix }
+                LiteralFlavor::IntNumber { suffix }
             }
             FLOAT_NUMBER => {
                 let allowed_suffix_list = ["f64", "f32"];
-                let text = syntax.text().to_string();
+                let text = self.token().text().to_string();
                 let suffix = allowed_suffix_list
                     .iter()
                     .find(|&s| text.ends_with(s))
@@ -750,11 +808,29 @@ impl NamedField {

 impl BindPat {
     pub fn is_mutable(&self) -> bool {
-        self.syntax().children().any(|n| n.kind() == MUT_KW)
+        self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW)
     }

     pub fn is_ref(&self) -> bool {
-        self.syntax().children().any(|n| n.kind() == REF_KW)
+        self.syntax().children_with_tokens().any(|n| n.kind() == REF_KW)
     }
 }

+impl LifetimeParam {
+    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
+        self.syntax()
+            .children_with_tokens()
+            .filter_map(|it| it.as_token())
+            .find(|it| it.kind() == LIFETIME)
+    }
+}
+
+impl WherePred {
+    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
+        self.syntax()
+            .children_with_tokens()
+            .filter_map(|it| it.as_token())
+            .find(|it| it.kind() == LIFETIME)
+    }
+}

@@ -835,7 +911,7 @@ where
         let pred = predicates.next().unwrap();
         let mut bounds = pred.type_bound_list().unwrap().bounds();

-        assert_eq!("'a", pred.lifetime().unwrap().syntax().text().to_string());
+        assert_eq!("'a", pred.lifetime_token().unwrap().text());

         assert_bound("'b", bounds.next());
         assert_bound("'c", bounds.next());

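`BinExpr::op_details` above scans the mixed node/token children for the first operator token and classifies it in one pass. A self-contained sketch of that find-the-operator pattern follows, again with stand-in types rather than the real `SyntaxKind` and `SyntaxToken`.

// Stand-ins; the real code matches on generated SyntaxKind values.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Kind { Plus, Minus, Whitespace, Literal }

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum BinOp { Addition, Subtraction }

#[derive(Debug, Clone, Copy)]
enum Element { Node(Kind), Token(Kind) }

impl Element {
    // Only tokens can be operators, mirroring `.filter_map(|it| it.as_token())`.
    fn as_token(self) -> Option<Kind> {
        match self {
            Element::Token(k) => Some(k),
            Element::Node(_) => None,
        }
    }
}

// Mirrors op_details: find the first operator *token* among the children.
fn op_details(children: &[Element]) -> Option<(Kind, BinOp)> {
    children.iter().filter_map(|it| it.as_token()).find_map(|k| match k {
        Kind::Plus => Some((k, BinOp::Addition)),
        Kind::Minus => Some((k, BinOp::Subtraction)),
        _ => None,
    })
}

fn main() {
    // Children of a `1 + 1`-style expression: literal, space, operator, literal.
    let children = [
        Element::Node(Kind::Literal),
        Element::Token(Kind::Whitespace),
        Element::Token(Kind::Plus),
        Element::Node(Kind::Literal),
    ];
    assert_eq!(op_details(&children).map(|t| t.1), Some(BinOp::Addition));
}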
@@ -376,64 +376,6 @@ impl BreakExpr {
     }
 }

-// Byte
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct Byte {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for Byte {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for Byte {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            BYTE => Some(Byte::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for Byte {
-    type Owned = TreeArc<Byte>;
-    fn to_owned(&self) -> TreeArc<Byte> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for Byte {}
-impl Byte {}
-
-// ByteString
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct ByteString {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for ByteString {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for ByteString {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            BYTE_STRING => Some(ByteString::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for ByteString {
-    type Owned = TreeArc<ByteString>;
-    fn to_owned(&self) -> TreeArc<ByteString> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for ByteString {}
-impl ByteString {}
-
 // CallExpr
 #[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
@@ -503,64 +445,6 @@ impl CastExpr {
     }
 }

-// Char
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct Char {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for Char {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for Char {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            CHAR => Some(Char::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for Char {
-    type Owned = TreeArc<Char>;
-    fn to_owned(&self) -> TreeArc<Char> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for Char {}
-impl Char {}
-
-// Comment
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct Comment {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for Comment {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for Comment {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            COMMENT => Some(Comment::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for Comment {
-    type Owned = TreeArc<Comment>;
-    fn to_owned(&self) -> TreeArc<Comment> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for Comment {}
-impl Comment {}
-
 // Condition
 #[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
@@ -1115,35 +999,6 @@ impl ExternCrateItem {
     }
 }

-// FalseKw
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct FalseKw {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for FalseKw {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for FalseKw {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            FALSE_KW => Some(FalseKw::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for FalseKw {
-    type Owned = TreeArc<FalseKw>;
-    fn to_owned(&self) -> TreeArc<FalseKw> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for FalseKw {}
-impl FalseKw {}
-
 // FieldExpr
 #[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
@@ -1249,35 +1104,6 @@ impl FieldPatList {
     }
 }

-// FloatNumber
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct FloatNumber {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for FloatNumber {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for FloatNumber {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            FLOAT_NUMBER => Some(FloatNumber::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for FloatNumber {
-    type Owned = TreeArc<FloatNumber>;
-    fn to_owned(&self) -> TreeArc<FloatNumber> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for FloatNumber {}
-impl FloatNumber {}
-
 // FnDef
 #[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
@@ -1613,35 +1439,6 @@ impl ToOwned for IndexExpr {

 impl IndexExpr {}

-// IntNumber
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct IntNumber {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for IntNumber {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for IntNumber {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            INT_NUMBER => Some(IntNumber::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for IntNumber {
-    type Owned = TreeArc<IntNumber>;
-    fn to_owned(&self) -> TreeArc<IntNumber> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for IntNumber {}
-impl IntNumber {}
-
 // ItemList
 #[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
@@ -1777,35 +1574,6 @@ impl LetStmt {
     }
 }

-// Lifetime
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct Lifetime {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for Lifetime {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for Lifetime {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            LIFETIME => Some(Lifetime::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for Lifetime {
-    type Owned = TreeArc<Lifetime>;
-    fn to_owned(&self) -> TreeArc<Lifetime> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for Lifetime {}
-impl Lifetime {}
-
 // LifetimeArg
 #[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
@@ -1832,11 +1600,7 @@ impl ToOwned for LifetimeArg {
 }


-impl LifetimeArg {
-    pub fn lifetime(&self) -> Option<&Lifetime> {
-        super::child_opt(self)
-    }
-}
+impl LifetimeArg {}

 // LifetimeParam
 #[derive(Debug, PartialEq, Eq, Hash)]
@@ -1865,11 +1629,7 @@ impl ToOwned for LifetimeParam {


 impl ast::AttrsOwner for LifetimeParam {}
-impl LifetimeParam {
-    pub fn lifetime(&self) -> Option<&Lifetime> {
-        super::child_opt(self)
-    }
-}
+impl LifetimeParam {}

 // Literal
 #[derive(Debug, PartialEq, Eq, Hash)]
@@ -1897,130 +1657,7 @@ impl ToOwned for Literal {
 }


-impl Literal {
-    pub fn literal_expr(&self) -> Option<&LiteralExpr> {
-        super::child_opt(self)
-    }
-}
+impl Literal {}
-
-// LiteralExpr
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct LiteralExpr {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for LiteralExpr {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum LiteralExprKind<'a> {
-    String(&'a String),
-    ByteString(&'a ByteString),
-    RawString(&'a RawString),
-    RawByteString(&'a RawByteString),
-    Char(&'a Char),
-    Byte(&'a Byte),
-    IntNumber(&'a IntNumber),
-    FloatNumber(&'a FloatNumber),
-    TrueKw(&'a TrueKw),
-    FalseKw(&'a FalseKw),
-}
-impl<'a> From<&'a String> for &'a LiteralExpr {
-    fn from(n: &'a String) -> &'a LiteralExpr {
-        LiteralExpr::cast(&n.syntax).unwrap()
-    }
-}
-impl<'a> From<&'a ByteString> for &'a LiteralExpr {
-    fn from(n: &'a ByteString) -> &'a LiteralExpr {
-        LiteralExpr::cast(&n.syntax).unwrap()
-    }
-}
-impl<'a> From<&'a RawString> for &'a LiteralExpr {
-    fn from(n: &'a RawString) -> &'a LiteralExpr {
-        LiteralExpr::cast(&n.syntax).unwrap()
-    }
-}
-impl<'a> From<&'a RawByteString> for &'a LiteralExpr {
-    fn from(n: &'a RawByteString) -> &'a LiteralExpr {
-        LiteralExpr::cast(&n.syntax).unwrap()
-    }
-}
-impl<'a> From<&'a Char> for &'a LiteralExpr {
-    fn from(n: &'a Char) -> &'a LiteralExpr {
-        LiteralExpr::cast(&n.syntax).unwrap()
-    }
-}
-impl<'a> From<&'a Byte> for &'a LiteralExpr {
-    fn from(n: &'a Byte) -> &'a LiteralExpr {
-        LiteralExpr::cast(&n.syntax).unwrap()
-    }
-}
-impl<'a> From<&'a IntNumber> for &'a LiteralExpr {
-    fn from(n: &'a IntNumber) -> &'a LiteralExpr {
-        LiteralExpr::cast(&n.syntax).unwrap()
-    }
-}
-impl<'a> From<&'a FloatNumber> for &'a LiteralExpr {
-    fn from(n: &'a FloatNumber) -> &'a LiteralExpr {
-        LiteralExpr::cast(&n.syntax).unwrap()
-    }
-}
-impl<'a> From<&'a TrueKw> for &'a LiteralExpr {
-    fn from(n: &'a TrueKw) -> &'a LiteralExpr {
-        LiteralExpr::cast(&n.syntax).unwrap()
-    }
-}
-impl<'a> From<&'a FalseKw> for &'a LiteralExpr {
-    fn from(n: &'a FalseKw) -> &'a LiteralExpr {
-        LiteralExpr::cast(&n.syntax).unwrap()
-    }
-}
-
-
-impl AstNode for LiteralExpr {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            | STRING
-            | BYTE_STRING
-            | RAW_STRING
-            | RAW_BYTE_STRING
-            | CHAR
-            | BYTE
-            | INT_NUMBER
-            | FLOAT_NUMBER
-            | TRUE_KW
-            | FALSE_KW => Some(LiteralExpr::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for LiteralExpr {
-    type Owned = TreeArc<LiteralExpr>;
-    fn to_owned(&self) -> TreeArc<LiteralExpr> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-impl LiteralExpr {
-    pub fn kind(&self) -> LiteralExprKind {
-        match self.syntax.kind() {
-            STRING => LiteralExprKind::String(String::cast(&self.syntax).unwrap()),
-            BYTE_STRING => LiteralExprKind::ByteString(ByteString::cast(&self.syntax).unwrap()),
-            RAW_STRING => LiteralExprKind::RawString(RawString::cast(&self.syntax).unwrap()),
-            RAW_BYTE_STRING => LiteralExprKind::RawByteString(RawByteString::cast(&self.syntax).unwrap()),
-            CHAR => LiteralExprKind::Char(Char::cast(&self.syntax).unwrap()),
-            BYTE => LiteralExprKind::Byte(Byte::cast(&self.syntax).unwrap()),
-            INT_NUMBER => LiteralExprKind::IntNumber(IntNumber::cast(&self.syntax).unwrap()),
-            FLOAT_NUMBER => LiteralExprKind::FloatNumber(FloatNumber::cast(&self.syntax).unwrap()),
-            TRUE_KW => LiteralExprKind::TrueKw(TrueKw::cast(&self.syntax).unwrap()),
-            FALSE_KW => LiteralExprKind::FalseKw(FalseKw::cast(&self.syntax).unwrap()),
-            _ => unreachable!(),
-        }
-    }
-}
-
-impl LiteralExpr {}

 // LiteralPat
 #[derive(Debug, PartialEq, Eq, Hash)]
@@ -3404,64 +3041,6 @@ impl ToOwned for RangePat {

 impl RangePat {}

-// RawByteString
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct RawByteString {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for RawByteString {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for RawByteString {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            RAW_BYTE_STRING => Some(RawByteString::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for RawByteString {
-    type Owned = TreeArc<RawByteString>;
-    fn to_owned(&self) -> TreeArc<RawByteString> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for RawByteString {}
-impl RawByteString {}
-
-// RawString
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct RawString {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for RawString {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for RawString {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            RAW_STRING => Some(RawString::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for RawString {
-    type Owned = TreeArc<RawString>;
-    fn to_owned(&self) -> TreeArc<RawString> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for RawString {}
-impl RawString {}
-
 // RefExpr
 #[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
@@ -3622,34 +3201,6 @@ impl ReturnExpr {
     }
 }

-// SelfKw
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct SelfKw {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for SelfKw {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for SelfKw {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            SELF_KW => Some(SelfKw::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for SelfKw {
-    type Owned = TreeArc<SelfKw>;
-    fn to_owned(&self) -> TreeArc<SelfKw> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl SelfKw {}
-
 // SelfParam
 #[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
@@ -3677,11 +3228,7 @@ impl ToOwned for SelfParam {


 impl ast::TypeAscriptionOwner for SelfParam {}
-impl SelfParam {
-    pub fn self_kw(&self) -> Option<&SelfKw> {
-        super::child_opt(self)
-    }
-}
+impl SelfParam {}

 // SlicePat
 #[derive(Debug, PartialEq, Eq, Hash)]
@@ -3866,35 +3413,6 @@ impl Stmt {

 impl Stmt {}

-// String
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct String {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for String {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for String {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            STRING => Some(String::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for String {
-    type Owned = TreeArc<String>;
-    fn to_owned(&self) -> TreeArc<String> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for String {}
-impl String {}
-
 // StructDef
 #[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
@@ -4070,35 +3588,6 @@ impl TraitDef {
     }
 }

-// TrueKw
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct TrueKw {
-    pub(crate) syntax: SyntaxNode,
-}
-unsafe impl TransparentNewType for TrueKw {
-    type Repr = rowan::SyntaxNode<RaTypes>;
-}
-
-impl AstNode for TrueKw {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
-        match syntax.kind() {
-            TRUE_KW => Some(TrueKw::from_repr(syntax.into_repr())),
-            _ => None,
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
-
-impl ToOwned for TrueKw {
-    type Owned = TreeArc<TrueKw>;
-    fn to_owned(&self) -> TreeArc<TrueKw> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
-
-impl ast::AstToken for TrueKw {}
-impl TrueKw {}
-
 // TryExpr
 #[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
@@ -4403,10 +3892,6 @@ impl TypeBound {
     pub fn type_ref(&self) -> Option<&TypeRef> {
         super::child_opt(self)
     }
-
-    pub fn lifetime(&self) -> Option<&Lifetime> {
-        super::child_opt(self)
-    }
 }

 // TypeBoundList
@@ -4847,10 +4332,6 @@ impl WherePred {
     pub fn type_ref(&self) -> Option<&TypeRef> {
         super::child_opt(self)
     }
-
-    pub fn lifetime(&self) -> Option<&Lifetime> {
-        super::child_opt(self)
-    }
 }

 // WhileExpr

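Every generated node type removed or kept above follows the same newtype pattern: a `#[repr(transparent)]` wrapper over `SyntaxNode` whose `cast` checks the kind and reinterprets the reference. A minimal self-contained sketch of why that is sound, using a stand-in `SyntaxNode` and a raw pointer cast in place of the real `from_repr` helper:

// Stand-in node; the real SyntaxNode wraps rowan's.
#[derive(Debug, PartialEq, Eq, Hash)]
struct SyntaxNode { kind: u16 }

const FN_DEF: u16 = 1;

#[repr(transparent)]
struct FnDef { syntax: SyntaxNode }

impl FnDef {
    // `cast` succeeds only for the matching kind. #[repr(transparent)]
    // guarantees FnDef and SyntaxNode share one layout, so reinterpreting
    // the reference (what the generated from_repr call does) is sound.
    fn cast(syntax: &SyntaxNode) -> Option<&FnDef> {
        if syntax.kind == FN_DEF {
            // SAFETY: FnDef is #[repr(transparent)] over SyntaxNode.
            Some(unsafe { &*(syntax as *const SyntaxNode as *const FnDef) })
        } else {
            None
        }
    }
}

fn main() {
    let node = SyntaxNode { kind: FN_DEF };
    assert!(FnDef::cast(&node).is_some());
    assert!(FnDef::cast(&SyntaxNode { kind: 0 }).is_none());
}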
@@ -463,31 +463,7 @@ Grammar(
     "RangeExpr": (),
     "BinExpr": (),

-    "IntNumber": ( traits: ["AstToken"] ),
-    "FloatNumber": ( traits: ["AstToken"] ),
-    "String": ( traits: ["AstToken"] ),
-    "RawString": ( traits: ["AstToken"] ),
-    "Byte": ( traits: ["AstToken"] ),
-    "RawByteString": ( traits: ["AstToken"] ),
-    "ByteString": ( traits: ["AstToken"] ),
-    "Char": ( traits: ["AstToken"] ),
-    "TrueKw": ( traits: ["AstToken"] ),
-    "FalseKw": ( traits: ["AstToken"] ),
-    "LiteralExpr": (
-        enum: [
-            "String",
-            "ByteString",
-            "RawString",
-            "RawByteString",
-            "Char",
-            "Byte",
-            "IntNumber",
-            "FloatNumber",
-            "TrueKw",
-            "FalseKw",
-        ]
-    ),
-    "Literal": (options: ["LiteralExpr"]),
+    "Literal": (),

     "Expr": (
         enum: [
@@ -580,14 +556,11 @@ Grammar(
     ),
     "TypeParam": ( traits: ["NameOwner", "AttrsOwner", "TypeBoundsOwner"] ),
     "LifetimeParam": (
-        options: [ "Lifetime"],
         traits: ["AttrsOwner"],
     ),
-    "Lifetime": ( traits: ["AstToken"] ),
     "TypeBound": (
         options: [
             "TypeRef",
-            "Lifetime",
         ]
     ),
     "TypeBoundList": (
@@ -598,7 +571,6 @@ Grammar(
     "WherePred": (
         options: [
             "TypeRef",
-            "Lifetime",
         ],
         traits: [
            "TypeBoundsOwner",
@@ -643,12 +615,10 @@ Grammar(
        ]
     ),
     "SelfParam": (
-        options: ["SelfKw"],
         traits: [
             "TypeAscriptionOwner",
         ]
     ),
-    "SelfKw": (),
     "Param": (
         options: [ "Pat" ],
         traits: [
@@ -692,8 +662,7 @@ Grammar(
     ]),
     "TypeArg": (options: ["TypeRef"]),
     "AssocTypeArg": (options: ["NameRef", "TypeRef"]),
-    "LifetimeArg": (options: ["Lifetime"]),
-    "Comment": ( traits: ["AstToken"] ),
+    "LifetimeArg": (),
     "Whitespace": ( traits: ["AstToken"] ),
 },
 )

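grammar.ron drives the codegen for generated.rs: an `options: [...]` entry becomes an `Option`-returning typed-child accessor, which is why deleting `options: ["Lifetime"]` here deletes the `lifetime()` methods seen earlier. A toy sketch of that mapping; `emit_option_accessor` is a hypothetical name for illustration, not the real codegen.

// Renders the accessor shape that an `options: [...]` entry expands to.
fn emit_option_accessor(node: &str, child: &str) -> String {
    let method = child.to_lowercase();
    format!(
        "impl {node} {{\n    pub fn {method}(&self) -> Option<&{child}> {{\n        super::child_opt(self)\n    }}\n}}\n",
        node = node,
        method = method,
        child = child,
    )
}

fn main() {
    // Matches the accessor this diff removes along with the entry.
    print!("{}", emit_option_accessor("LifetimeArg", "Lifetime"));
}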
@@ -38,7 +38,7 @@ pub use crate::{
     ast::AstNode,
     syntax_error::{SyntaxError, SyntaxErrorKind, Location},
     syntax_text::SyntaxText,
-    syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder},
+    syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder, SyntaxElement, SyntaxToken},
     ptr::{SyntaxNodePtr, AstPtr},
     parsing::{tokenize, Token},
 };
@@ -70,7 +70,7 @@ impl SourceFile {

     pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<TreeArc<SourceFile>> {
         parsing::incremental_reparse(self.syntax(), edit, self.errors())
-            .map(|(green_node, errors)| SourceFile::new(green_node, errors))
+            .map(|(green_node, errors, _reparsed_range)| SourceFile::new(green_node, errors))
     }

     fn full_reparse(&self, edit: &AtomTextEdit) -> TreeArc<SourceFile> {
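`incremental_reparse` now returns the reparsed range as a third tuple element; `SourceFile::incremental_reparse` simply discards it, while the tests below use it to assert how much text was actually reparsed. A small self-contained sketch of consuming the new shape, with stand-in types:

// Stand-ins for the real TextRange, GreenNode, and SyntaxError.
#[derive(Debug, Clone, Copy)]
struct TextRange { start: u32, end: u32 }

struct GreenNode;
struct SyntaxError;

// Stand-in for parsing::incremental_reparse after this change: it reports
// the tree, the merged errors, and the range of old text that was replaced.
fn incremental_reparse() -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
    Some((GreenNode, Vec::new(), TextRange { start: 10, end: 13 }))
}

fn main() {
    // A SourceFile-style caller only needs the tree and errors; a test-style
    // caller can additionally check the reparsed fragment's length.
    if let Some((_green, errors, reparsed_range)) = incremental_reparse() {
        assert!(errors.is_empty());
        assert_eq!(reparsed_range.end - reparsed_range.start, 3);
    }
}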
@@ -179,15 +179,23 @@ fn api_walkthrough() {

     // There's a bunch of traversal methods on `SyntaxNode`:
     assert_eq!(expr_syntax.parent(), Some(block.syntax()));
-    assert_eq!(block.syntax().first_child().map(|it| it.kind()), Some(SyntaxKind::L_CURLY));
-    assert_eq!(expr_syntax.next_sibling().map(|it| it.kind()), Some(SyntaxKind::WHITESPACE));
+    assert_eq!(
+        block.syntax().first_child_or_token().map(|it| it.kind()),
+        Some(SyntaxKind::L_CURLY)
+    );
+    assert_eq!(
+        expr_syntax.next_sibling_or_token().map(|it| it.kind()),
+        Some(SyntaxKind::WHITESPACE)
+    );

     // As well as some iterator helpers:
     let f = expr_syntax.ancestors().find_map(ast::FnDef::cast);
     assert_eq!(f, Some(&*func));
-    assert!(expr_syntax.siblings(Direction::Next).any(|it| it.kind() == SyntaxKind::R_CURLY));
+    assert!(expr_syntax
+        .siblings_with_tokens(Direction::Next)
+        .any(|it| it.kind() == SyntaxKind::R_CURLY));
     assert_eq!(
-        expr_syntax.descendants().count(),
+        expr_syntax.descendants_with_tokens().count(),
         8, // 5 tokens `1`, ` `, `+`, ` `, `!`
         // 2 child literal expressions: `1`, `1`
         // 1 the node itself: `1 + 1`
@@ -196,16 +204,14 @@ fn api_walkthrough() {
     // There's also a `preorder` method with a more fine-grained iteration control:
     let mut buf = String::new();
     let mut indent = 0;
-    for event in expr_syntax.preorder() {
+    for event in expr_syntax.preorder_with_tokens() {
         match event {
             WalkEvent::Enter(node) => {
-                buf += &format!(
-                    "{:indent$}{:?} {:?}\n",
-                    " ",
-                    node.text(),
-                    node.kind(),
-                    indent = indent
-                );
+                let text = match node {
+                    SyntaxElement::Node(it) => it.text().to_string(),
+                    SyntaxElement::Token(it) => it.text().to_string(),
+                };
+                buf += &format!("{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent);
                 indent += 2;
             }
             WalkEvent::Leave(_) => indent -= 2,

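The rewritten walkthrough drives indentation from enter/leave events. A self-contained sketch of that bookkeeping over a hand-written event stream; `WalkEvent` here is a stand-in for rowan's, and the kinds are plain strings.

// Stand-in for rowan's WalkEvent.
enum WalkEvent<T> { Enter(T), Leave(T) }

fn main() {
    // A hand-written preorder event stream for a small `1 + 1` expression.
    let events = vec![
        WalkEvent::Enter("BIN_EXPR"),
        WalkEvent::Enter("INT_NUMBER"),
        WalkEvent::Leave("INT_NUMBER"),
        WalkEvent::Enter("PLUS"),
        WalkEvent::Leave("PLUS"),
        WalkEvent::Enter("INT_NUMBER"),
        WalkEvent::Leave("INT_NUMBER"),
        WalkEvent::Leave("BIN_EXPR"),
    ];
    let mut buf = String::new();
    let mut indent = 0;
    for event in events {
        match event {
            // Enter prints at the current depth, then descends.
            WalkEvent::Enter(kind) => {
                buf += &format!("{:indent$}{}\n", "", kind, indent = indent);
                indent += 2;
            }
            // Leave only pops back out; nothing is printed.
            WalkEvent::Leave(_) => indent -= 2,
        }
    }
    print!("{}", buf);
}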
@@ -12,7 +12,7 @@ use ra_parser::Reparser;
 use crate::{
     SyntaxKind::*, TextRange, TextUnit, SyntaxError,
     algo,
-    syntax_node::{GreenNode, SyntaxNode},
+    syntax_node::{GreenNode, SyntaxNode, GreenToken, SyntaxElement},
     parsing::{
         text_token_source::TextTokenSource,
         text_tree_sink::TextTreeSink,
@@ -24,60 +24,62 @@ pub(crate) fn incremental_reparse(
     node: &SyntaxNode,
     edit: &AtomTextEdit,
     errors: Vec<SyntaxError>,
-) -> Option<(GreenNode, Vec<SyntaxError>)> {
-    let (node, green, new_errors) =
-        reparse_leaf(node, &edit).or_else(|| reparse_block(node, &edit))?;
-    let green_root = node.replace_with(green);
-    let errors = merge_errors(errors, new_errors, node, edit);
-    Some((green_root, errors))
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+    if let Some((green, old_range)) = reparse_token(node, &edit) {
+        return Some((green, merge_errors(errors, Vec::new(), old_range, edit), old_range));
+    }
+
+    if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) {
+        return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
+    }
+    None
 }

-fn reparse_leaf<'node>(
+fn reparse_token<'node>(
     root: &'node SyntaxNode,
     edit: &AtomTextEdit,
-) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> {
-    let node = algo::find_covering_node(root, edit.delete);
-    match node.kind() {
+) -> Option<(GreenNode, TextRange)> {
+    let token = algo::find_covering_element(root, edit.delete).as_token()?;
+    match token.kind() {
         WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
-            if node.kind() == WHITESPACE || node.kind() == COMMENT {
+            if token.kind() == WHITESPACE || token.kind() == COMMENT {
                 // removing a new line may extends previous token
-                if node.text().to_string()[edit.delete - node.range().start()].contains('\n') {
+                if token.text().to_string()[edit.delete - token.range().start()].contains('\n') {
                     return None;
                 }
             }

-            let text = get_text_after_edit(node, &edit);
-            let tokens = tokenize(&text);
-            let token = match tokens[..] {
-                [token] if token.kind == node.kind() => token,
+            let text = get_text_after_edit(token.into(), &edit);
+            let lex_tokens = tokenize(&text);
+            let lex_token = match lex_tokens[..] {
+                [lex_token] if lex_token.kind == token.kind() => lex_token,
                 _ => return None,
             };

-            if token.kind == IDENT && is_contextual_kw(&text) {
+            if lex_token.kind == IDENT && is_contextual_kw(&text) {
                 return None;
             }

-            if let Some(next_char) = root.text().char_at(node.range().end()) {
+            if let Some(next_char) = root.text().char_at(token.range().end()) {
                 let tokens_with_next_char = tokenize(&format!("{}{}", text, next_char));
                 if tokens_with_next_char.len() == 1 {
                     return None;
                 }
             }

-            let green = GreenNode::new_leaf(node.kind(), text.into());
-            let new_errors = vec![];
-            Some((node, green, new_errors))
+            let new_token = GreenToken::new(token.kind(), text.into());
+            Some((token.replace_with(new_token), token.range()))
         }
         _ => None,
     }
 }

 fn reparse_block<'node>(
-    node: &'node SyntaxNode,
+    root: &'node SyntaxNode,
     edit: &AtomTextEdit,
-) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> {
-    let (node, reparser) = find_reparsable_node(node, edit.delete)?;
-    let text = get_text_after_edit(node, &edit);
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+    let (node, reparser) = find_reparsable_node(root, edit.delete)?;
+    let text = get_text_after_edit(node.into(), &edit);
     let tokens = tokenize(&text);
     if !is_balanced(&tokens) {
         return None;
@@ -86,12 +88,16 @@ fn reparse_block<'node>(
     let mut tree_sink = TextTreeSink::new(&text, &tokens);
     reparser.parse(&token_source, &mut tree_sink);
     let (green, new_errors) = tree_sink.finish();
-    Some((node, green, new_errors))
+    Some((node.replace_with(green), new_errors, node.range()))
 }

-fn get_text_after_edit(node: &SyntaxNode, edit: &AtomTextEdit) -> String {
-    let edit = AtomTextEdit::replace(edit.delete - node.range().start(), edit.insert.clone());
-    edit.apply(node.text().to_string())
+fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String {
+    let edit = AtomTextEdit::replace(edit.delete - element.range().start(), edit.insert.clone());
+    let text = match element {
+        SyntaxElement::Token(token) => token.text().to_string(),
+        SyntaxElement::Node(node) => node.text().to_string(),
+    };
+    edit.apply(text)
 }

 fn is_contextual_kw(text: &str) -> bool {
@@ -102,9 +108,13 @@ fn is_contextual_kw(text: &str) -> bool {
 }

 fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, Reparser)> {
-    let node = algo::find_covering_node(node, range);
-    node.ancestors().find_map(|node| {
-        let first_child = node.first_child().map(|it| it.kind());
+    let node = algo::find_covering_element(node, range);
+    let mut ancestors = match node {
+        SyntaxElement::Token(it) => it.parent().ancestors(),
+        SyntaxElement::Node(it) => it.ancestors(),
+    };
+    ancestors.find_map(|node| {
+        let first_child = node.first_child_or_token().map(|it| it.kind());
         let parent = node.parent().map(|it| it.kind());
         Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r))
     })
@@ -136,19 +146,19 @@ fn is_balanced(tokens: &[Token]) -> bool {
 fn merge_errors(
     old_errors: Vec<SyntaxError>,
     new_errors: Vec<SyntaxError>,
-    old_node: &SyntaxNode,
+    old_range: TextRange,
     edit: &AtomTextEdit,
 ) -> Vec<SyntaxError> {
     let mut res = Vec::new();
     for e in old_errors {
-        if e.offset() <= old_node.range().start() {
+        if e.offset() <= old_range.start() {
             res.push(e)
-        } else if e.offset() >= old_node.range().end() {
+        } else if e.offset() >= old_range.end() {
             res.push(e.add_offset(TextUnit::of_str(&edit.insert), edit.delete.len()));
         }
     }
     for e in new_errors {
-        res.push(e.add_offset(old_node.range().start(), 0.into()));
+        res.push(e.add_offset(old_range.start(), 0.into()));
     }
     res
 }
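The `merge_errors` logic above is pure offset arithmetic: errors before the reparsed range keep their offsets, errors after it shift by the edit's length delta, errors inside it are dropped, and errors from the fresh parse are rebased onto the range start. A worked sketch with plain integers standing in for `TextUnit` offsets:

fn main() {
    let old_range = (10u32, 20u32); // reparsed span in the old text
    let insert_len = 7u32; // length of the text the edit inserts
    let delete_len = 10u32; // length of the text the edit deletes

    let old_errors = vec![4u32, 25u32]; // offsets in the old text
    let new_errors = vec![3u32]; // offsets relative to the reparsed fragment

    let mut res = Vec::new();
    for e in old_errors {
        if e <= old_range.0 {
            res.push(e); // untouched prefix: keep as-is
        } else if e >= old_range.1 {
            res.push(e + insert_len - delete_len); // suffix: shift by the delta
        }
        // errors inside the range are dropped; the reparse regenerates them
    }
    for e in new_errors {
        res.push(old_range.0 + e); // rebase fragment-relative offsets
    }
    assert_eq!(res, vec![4, 22, 13]);
}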
@@ -160,13 +170,7 @@ mod tests {
     use crate::{SourceFile, AstNode};
     use super::*;

-    fn do_check<F>(before: &str, replace_with: &str, reparser: F)
-    where
-        for<'a> F: Fn(
-            &'a SyntaxNode,
-            &AtomTextEdit,
-        ) -> Option<(&'a SyntaxNode, GreenNode, Vec<SyntaxError>)>,
-    {
+    fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
         let (range, before) = extract_range(before);
         let edit = AtomTextEdit::replace(range, replace_with.to_owned());
         let after = edit.apply(before.clone());
@@ -175,23 +179,20 @@ mod tests {
         let incrementally_reparsed = {
             let f = SourceFile::parse(&before);
             let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() };
-            let (node, green, new_errors) =
-                reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
-            let green_root = node.replace_with(green);
-            let errors = super::merge_errors(f.errors(), new_errors, node, &edit);
-            SourceFile::new(green_root, errors)
+            let (green, new_errors, range) =
+                incremental_reparse(f.syntax(), &edit, f.errors()).unwrap();
+            assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
+            SourceFile::new(green, new_errors)
         };

         assert_eq_text!(
             &fully_reparsed.syntax().debug_dump(),
             &incrementally_reparsed.syntax().debug_dump(),
-        )
+        );
     }

-    #[test]
+    #[test] // FIXME: some test here actually test token reparsing
     fn reparse_block_tests() {
-        let do_check = |before, replace_to| do_check(before, replace_to, reparse_block);

         do_check(
             r"
 fn foo() {
@@ -199,6 +200,7 @@ fn foo() {
 }
 ",
             "baz",
+            3,
         );
         do_check(
             r"
@@ -207,6 +209,7 @@ fn foo() {
 }
 ",
             "baz",
+            25,
         );
         do_check(
             r"
@@ -215,6 +218,7 @@ struct Foo {
 }
 ",
             ",\n    g: (),",
+            14,
         );
         do_check(
             r"
@@ -225,6 +229,7 @@ fn foo {
 }
 ",
             "62",
+            31, // FIXME: reparse only int literal here
         );
         do_check(
             r"
@@ -233,7 +238,9 @@ mod foo {
 }
 ",
             "bar",
+            11,
         );

         do_check(
             r"
 trait Foo {
@@ -241,6 +248,7 @@ trait Foo {
 }
 ",
             "Output",
+            3,
         );
         do_check(
             r"
@@ -249,13 +257,9 @@ impl IntoIterator<Item=i32> for Foo {
 }
 ",
             "n next(",
+            9,
         );
-        do_check(
-            r"
-use a::b::{foo,<|>,bar<|>};
-",
-            "baz",
-        );
+        do_check(r"use a::b::{foo,<|>,bar<|>};", "baz", 10);
         do_check(
             r"
 pub enum A {
@@ -263,12 +267,14 @@ pub enum A {
 }
 ",
             "\nBar;\n",
+            11,
         );
         do_check(
             r"
 foo!{a, b<|><|> d}
 ",
             ", c[3]",
+            8,
         );
         do_check(
             r"
@@ -277,6 +283,7 @@ fn foo() {
 }
 ",
             "123",
+            14,
         );
         do_check(
             r"
@@ -285,54 +292,60 @@ extern {
 }
 ",
             " exit(code: c_int)",
+            11,
         );
     }

     #[test]
-    fn reparse_leaf_tests() {
+    fn reparse_token_tests() {
-        let do_check = |before, replace_to| do_check(before, replace_to, reparse_leaf);

         do_check(
             r"<|><|>
 fn foo() -> i32 { 1 }
 ",
             "\n\n\n   \n",
+            1,
         );
         do_check(
             r"
 fn foo() -> <|><|> {}
 ",
             "  \n",
+            2,
         );
         do_check(
             r"
 fn <|>foo<|>() -> i32 { 1 }
 ",
             "bar",
+            3,
         );
         do_check(
             r"
 fn foo<|><|>foo() {  }
 ",
             "bar",
+            6,
        );
         do_check(
             r"
 fn foo /* <|><|> */ () {}
 ",
             "some comment",
+            6,
         );
         do_check(
             r"
 fn baz <|><|> () {}
 ",
             "    \t\t\n\n",
+            2,
         );
         do_check(
             r"
 fn baz <|><|> () {}
 ",
             "    \t\t\n\n",
+            2,
         );
         do_check(
             r"
@@ -340,24 +353,28 @@ fn baz <|><|> () {}
 mod { }
 ",
             "c",
+            14,
         );
         do_check(
             r#"
 fn -> &str { "Hello<|><|>" }
 "#,
             ", world",
+            7,
         );
         do_check(
             r#"
 fn -> &str { // "Hello<|><|>"
 "#,
             ", world",
+            10,
         );
         do_check(
             r##"
 fn -> &str { r#"Hello<|><|>"#
 "##,
             ", world",
+            10,
         );
         do_check(
             r"
@@ -367,6 +384,7 @@ enum Foo {
 }
 ",
             "Clone",
+            4,
         );
     }
 }

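The hunks below rename the `TreeSink` events from leaf/branch to token/node terminology. A minimal sketch of the event protocol itself; the trait here is a simplified stand-in mirroring the `token`/`start_node`/`finish_node` shape, not the real `ra_parser` trait.

// Simplified sink: receives tree-building events and renders nesting.
trait TreeSink {
    fn token(&mut self, kind: &str);
    fn start_node(&mut self, kind: &str);
    fn finish_node(&mut self);
}

struct Printer { depth: usize }

impl TreeSink for Printer {
    fn token(&mut self, kind: &str) {
        println!("{:indent$}token {}", "", kind, indent = self.depth * 2);
    }
    fn start_node(&mut self, kind: &str) {
        println!("{:indent$}node {}", "", kind, indent = self.depth * 2);
        self.depth += 1;
    }
    fn finish_node(&mut self) {
        self.depth -= 1;
    }
}

fn main() {
    // Event order for a `1 + 1` expression, as a parser would drive the sink.
    let mut sink = Printer { depth: 0 };
    sink.start_node("BIN_EXPR");
    sink.token("INT_NUMBER");
    sink.token("PLUS");
    sink.token("INT_NUMBER");
    sink.finish_node();
}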
@ -28,10 +28,10 @@ enum State {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> TreeSink for TextTreeSink<'a> {
|
impl<'a> TreeSink for TextTreeSink<'a> {
|
||||||
fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8) {
|
fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
|
||||||
match mem::replace(&mut self.state, State::Normal) {
|
match mem::replace(&mut self.state, State::Normal) {
|
||||||
State::PendingStart => unreachable!(),
|
State::PendingStart => unreachable!(),
|
||||||
State::PendingFinish => self.inner.finish_branch(),
|
State::PendingFinish => self.inner.finish_node(),
|
||||||
State::Normal => (),
|
State::Normal => (),
|
||||||
}
|
}
|
||||||
self.eat_trivias();
|
self.eat_trivias();
|
||||||
|
@ -40,18 +40,18 @@ impl<'a> TreeSink for TextTreeSink<'a> {
|
||||||
.iter()
|
.iter()
|
||||||
.map(|it| it.len)
|
.map(|it| it.len)
|
||||||
.sum::<TextUnit>();
|
.sum::<TextUnit>();
|
||||||
self.do_leaf(kind, len, n_tokens);
|
self.do_token(kind, len, n_tokens);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn start_branch(&mut self, kind: SyntaxKind) {
|
fn start_node(&mut self, kind: SyntaxKind) {
|
||||||
match mem::replace(&mut self.state, State::Normal) {
|
match mem::replace(&mut self.state, State::Normal) {
|
||||||
State::PendingStart => {
|
State::PendingStart => {
|
||||||
self.inner.start_branch(kind);
|
self.inner.start_node(kind);
|
||||||
// No need to attach trivias to previous node: there is no
|
// No need to attach trivias to previous node: there is no
|
||||||
// previous node.
|
// previous node.
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
State::PendingFinish => self.inner.finish_branch(),
|
State::PendingFinish => self.inner.finish_node(),
|
||||||
State::Normal => (),
|
State::Normal => (),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -71,14 +71,14 @@ impl<'a> TreeSink for TextTreeSink<'a> {
|
||||||
n_attached_trivias(kind, leading_trivias)
|
n_attached_trivias(kind, leading_trivias)
|
||||||
};
|
};
|
||||||
self.eat_n_trivias(n_trivias - n_attached_trivias);
|
self.eat_n_trivias(n_trivias - n_attached_trivias);
|
||||||
self.inner.start_branch(kind);
|
self.inner.start_node(kind);
|
||||||
self.eat_n_trivias(n_attached_trivias);
|
self.eat_n_trivias(n_attached_trivias);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn finish_branch(&mut self) {
|
fn finish_node(&mut self) {
|
||||||
match mem::replace(&mut self.state, State::PendingFinish) {
|
match mem::replace(&mut self.state, State::PendingFinish) {
|
||||||
State::PendingStart => unreachable!(),
|
State::PendingStart => unreachable!(),
|
||||||
State::PendingFinish => self.inner.finish_branch(),
|
State::PendingFinish => self.inner.finish_node(),
|
||||||
State::Normal => (),
|
State::Normal => (),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -104,7 +104,7 @@ impl<'a> TextTreeSink<'a> {
|
||||||
match mem::replace(&mut self.state, State::Normal) {
|
match mem::replace(&mut self.state, State::Normal) {
|
||||||
State::PendingFinish => {
|
State::PendingFinish => {
|
||||||
self.eat_trivias();
|
self.eat_trivias();
|
||||||
self.inner.finish_branch()
|
self.inner.finish_node()
|
||||||
}
|
}
|
||||||
State::PendingStart | State::Normal => unreachable!(),
|
State::PendingStart | State::Normal => unreachable!(),
|
||||||
}
|
}
|
||||||
|
@ -117,7 +117,7 @@ impl<'a> TextTreeSink<'a> {
|
||||||
if !token.kind.is_trivia() {
|
if !token.kind.is_trivia() {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
self.do_leaf(token.kind, token.len, 1);
|
self.do_token(token.kind, token.len, 1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -125,16 +125,16 @@ impl<'a> TextTreeSink<'a> {
|
||||||
for _ in 0..n {
|
for _ in 0..n {
|
||||||
let token = self.tokens[self.token_pos];
|
let token = self.tokens[self.token_pos];
|
||||||
assert!(token.kind.is_trivia());
|
assert!(token.kind.is_trivia());
|
||||||
self.do_leaf(token.kind, token.len, 1);
|
self.do_token(token.kind, token.len, 1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn do_leaf(&mut self, kind: SyntaxKind, len: TextUnit, n_tokens: usize) {
|
fn do_token(&mut self, kind: SyntaxKind, len: TextUnit, n_tokens: usize) {
|
||||||
let range = TextRange::offset_len(self.text_pos, len);
|
let range = TextRange::offset_len(self.text_pos, len);
|
||||||
let text: SmolStr = self.text[range].into();
|
let text: SmolStr = self.text[range].into();
|
||||||
self.text_pos += len;
|
self.text_pos += len;
|
||||||
self.token_pos += n_tokens;
|
self.token_pos += n_tokens;
|
||||||
self.inner.leaf(kind, text);
|
self.inner.token(kind, text);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
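The TreeSink callbacks pick up the new rowan vocabulary: leaf becomes token, and start_branch/finish_branch become start_node/finish_node. A self-contained mirror of the renamed surface with a sink that only counts events (SyntaxKind is stubbed here, and the real trait also carries an error callback that this sketch omits):

    #[derive(Clone, Copy, Debug)]
    struct SyntaxKind(u16);

    trait TreeSink {
        // One `token` event may span several lexer tokens (e.g. a glued `>>`).
        fn token(&mut self, kind: SyntaxKind, n_tokens: u8);
        fn start_node(&mut self, kind: SyntaxKind);
        fn finish_node(&mut self);
    }

    #[derive(Default)]
    struct CountingSink {
        nodes: u32,
        tokens: u32,
    }

    impl TreeSink for CountingSink {
        fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) {
            self.tokens += u32::from(n_tokens);
        }
        fn start_node(&mut self, _kind: SyntaxKind) {
            self.nodes += 1;
        }
        fn finish_node(&mut self) {}
    }

    fn main() {
        // Events roughly as the parser would emit them for a tiny file.
        let mut sink = CountingSink::default();
        sink.start_node(SyntaxKind(0));
        sink.token(SyntaxKind(1), 1);
        sink.finish_node();
        assert_eq!((sink.nodes, sink.tokens), (1, 1));
    }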

@@ -29,6 +29,9 @@ impl Types for RaTypes {
 }
 
 pub(crate) type GreenNode = rowan::GreenNode<RaTypes>;
+pub(crate) type GreenToken = rowan::GreenToken<RaTypes>;
+#[allow(unused)]
+pub(crate) type GreenElement = rowan::GreenElement<RaTypes>;
 
 /// Marker trait for CST and AST nodes
 pub trait SyntaxNodeWrapper: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>> {}
@@ -113,11 +116,13 @@ impl ToOwned for SyntaxNode {
 
 impl fmt::Debug for SyntaxNode {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        write!(fmt, "{:?}@{:?}", self.kind(), self.range())?;
-        if has_short_text(self.kind()) {
-            write!(fmt, " \"{}\"", self.text())?;
-        }
-        Ok(())
+        write!(fmt, "{:?}@{:?}", self.kind(), self.range())
+    }
+}
+
+impl fmt::Display for SyntaxNode {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(&self.text(), fmt)
     }
 }
 
@@ -145,14 +150,6 @@ impl SyntaxNode {
         SyntaxText::new(self)
     }
 
-    pub fn is_leaf(&self) -> bool {
-        self.0.is_leaf()
-    }
-
-    pub fn leaf_text(&self) -> Option<&SmolStr> {
-        self.0.leaf_text()
-    }
-
     pub fn parent(&self) -> Option<&SyntaxNode> {
         self.0.parent().map(SyntaxNode::from_repr)
     }
@@ -161,22 +158,50 @@ impl SyntaxNode {
         self.0.first_child().map(SyntaxNode::from_repr)
     }
 
+    pub fn first_child_or_token(&self) -> Option<SyntaxElement> {
+        self.0.first_child_or_token().map(SyntaxElement::from)
+    }
+
     pub fn last_child(&self) -> Option<&SyntaxNode> {
         self.0.last_child().map(SyntaxNode::from_repr)
     }
 
+    pub fn last_child_or_token(&self) -> Option<SyntaxElement> {
+        self.0.last_child_or_token().map(SyntaxElement::from)
+    }
+
     pub fn next_sibling(&self) -> Option<&SyntaxNode> {
         self.0.next_sibling().map(SyntaxNode::from_repr)
     }
 
+    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
+        self.0.next_sibling_or_token().map(SyntaxElement::from)
+    }
+
     pub fn prev_sibling(&self) -> Option<&SyntaxNode> {
         self.0.prev_sibling().map(SyntaxNode::from_repr)
     }
 
+    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
+        self.0.prev_sibling_or_token().map(SyntaxElement::from)
+    }
+
     pub fn children(&self) -> SyntaxNodeChildren {
         SyntaxNodeChildren(self.0.children())
     }
 
+    pub fn children_with_tokens(&self) -> SyntaxElementChildren {
+        SyntaxElementChildren(self.0.children_with_tokens())
+    }
+
+    pub fn first_token(&self) -> Option<SyntaxToken> {
+        self.0.first_token().map(SyntaxToken::from)
+    }
+
+    pub fn last_token(&self) -> Option<SyntaxToken> {
+        self.0.last_token().map(SyntaxToken::from)
+    }
+
     pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode> {
         crate::algo::generate(Some(self), |&node| node.parent())
     }
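Each child/sibling accessor now has an *_or_token twin that also yields tokens, as SyntaxElement values. A usage sketch, assuming ra_syntax with this change applied and the usual SourceFile::parse entry point:

    use ra_syntax::{AstNode, SourceFile, SyntaxElement};

    fn dump_top_level(text: &str) {
        let file = SourceFile::parse(text);
        // children() yields nodes only; children_with_tokens() also yields
        // the whitespace, punctuation and keyword tokens between them.
        for element in file.syntax().children_with_tokens() {
            match element {
                SyntaxElement::Node(node) => println!("node  {:?}", node.kind()),
                SyntaxElement::Token(token) => println!("token {:?}", token.kind()),
            }
        }
    }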
@@ -188,6 +213,13 @@ impl SyntaxNode {
         })
     }
 
+    pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElement> {
+        self.preorder_with_tokens().filter_map(|event| match event {
+            WalkEvent::Enter(it) => Some(it),
+            WalkEvent::Leave(_) => None,
+        })
+    }
+
     pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode> {
         crate::algo::generate(Some(self), move |&node| match direction {
             Direction::Next => node.next_sibling(),
@@ -195,6 +227,17 @@ impl SyntaxNode {
         })
     }
 
+    pub fn siblings_with_tokens(
+        &self,
+        direction: Direction,
+    ) -> impl Iterator<Item = SyntaxElement> {
+        let me: SyntaxElement = self.into();
+        crate::algo::generate(Some(me), move |el| match direction {
+            Direction::Next => el.next_sibling_or_token(),
+            Direction::Prev => el.prev_sibling_or_token(),
+        })
+    }
+
     pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode>> {
         self.0.preorder().map(|event| match event {
             WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode::from_repr(n)),
@@ -202,6 +245,13 @@ impl SyntaxNode {
         })
     }
 
+    pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElement>> {
+        self.0.preorder_with_tokens().map(|event| match event {
+            WalkEvent::Enter(n) => WalkEvent::Enter(n.into()),
+            WalkEvent::Leave(n) => WalkEvent::Leave(n.into()),
+        })
+    }
+
     pub fn memory_size_of_subtree(&self) -> usize {
         self.0.memory_size_of_subtree()
     }
@@ -223,17 +273,20 @@ impl SyntaxNode {
             };
         }
 
-        for event in self.preorder() {
+        for event in self.preorder_with_tokens() {
             match event {
-                WalkEvent::Enter(node) => {
+                WalkEvent::Enter(element) => {
                     indent!();
-                    writeln!(buf, "{:?}", node).unwrap();
-                    if node.first_child().is_none() {
-                        let off = node.range().end();
-                        while err_pos < errors.len() && errors[err_pos].offset() <= off {
-                            indent!();
-                            writeln!(buf, "err: `{}`", errors[err_pos]).unwrap();
-                            err_pos += 1;
+                    match element {
+                        SyntaxElement::Node(node) => writeln!(buf, "{:?}", node).unwrap(),
+                        SyntaxElement::Token(token) => {
+                            writeln!(buf, "{:?}", token).unwrap();
+                            let off = token.range().end();
+                            while err_pos < errors.len() && errors[err_pos].offset() <= off {
+                                indent!();
+                                writeln!(buf, "err: `{}`", errors[err_pos]).unwrap();
+                                err_pos += 1;
+                            }
                         }
                     }
                     level += 1;
@@ -255,7 +308,172 @@ impl SyntaxNode {
     }
 
     pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode {
-        self.0.replace_self(replacement)
+        self.0.replace_with(replacement)
+    }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct SyntaxToken<'a>(pub(crate) rowan::SyntaxToken<'a, RaTypes>);
+
+//FIXME: always output text
+impl<'a> fmt::Debug for SyntaxToken<'a> {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        write!(fmt, "{:?}@{:?}", self.kind(), self.range())?;
+        if has_short_text(self.kind()) {
+            write!(fmt, " \"{}\"", self.text())?;
+        }
+        Ok(())
+    }
+}
+
+impl<'a> fmt::Display for SyntaxToken<'a> {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(self.text(), fmt)
+    }
+}
+
+impl<'a> From<rowan::SyntaxToken<'a, RaTypes>> for SyntaxToken<'a> {
+    fn from(t: rowan::SyntaxToken<'a, RaTypes>) -> Self {
+        SyntaxToken(t)
+    }
+}
+
+impl<'a> SyntaxToken<'a> {
+    pub fn kind(&self) -> SyntaxKind {
+        self.0.kind()
+    }
+
+    pub fn text(&self) -> &'a SmolStr {
+        self.0.text()
+    }
+
+    pub fn range(&self) -> TextRange {
+        self.0.range()
+    }
+
+    pub fn parent(&self) -> &'a SyntaxNode {
+        SyntaxNode::from_repr(self.0.parent())
+    }
+
+    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<'a>> {
+        self.0.next_sibling_or_token().map(SyntaxElement::from)
+    }
+
+    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<'a>> {
+        self.0.prev_sibling_or_token().map(SyntaxElement::from)
+    }
+
+    pub fn siblings_with_tokens(
+        &self,
+        direction: Direction,
+    ) -> impl Iterator<Item = SyntaxElement<'a>> {
+        let me: SyntaxElement = (*self).into();
+        crate::algo::generate(Some(me), move |el| match direction {
+            Direction::Next => el.next_sibling_or_token(),
+            Direction::Prev => el.prev_sibling_or_token(),
+        })
+    }
+
+    pub fn next_token(&self) -> Option<SyntaxToken<'a>> {
+        self.0.next_token().map(SyntaxToken::from)
+    }
+
+    pub fn prev_token(&self) -> Option<SyntaxToken<'a>> {
+        self.0.prev_token().map(SyntaxToken::from)
+    }
+
+    pub(crate) fn replace_with(&self, new_token: GreenToken) -> GreenNode {
+        self.0.replace_with(new_token)
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
+pub enum SyntaxElement<'a> {
+    Node(&'a SyntaxNode),
+    Token(SyntaxToken<'a>),
+}
+
+impl<'a> fmt::Display for SyntaxElement<'a> {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            SyntaxElement::Node(it) => fmt::Display::fmt(it, fmt),
+            SyntaxElement::Token(it) => fmt::Display::fmt(it, fmt),
+        }
+    }
+}
+
+impl<'a> SyntaxElement<'a> {
+    pub fn kind(&self) -> SyntaxKind {
+        match self {
+            SyntaxElement::Node(it) => it.kind(),
+            SyntaxElement::Token(it) => it.kind(),
+        }
+    }
+
+    pub fn as_node(&self) -> Option<&'a SyntaxNode> {
+        match self {
+            SyntaxElement::Node(node) => Some(*node),
+            SyntaxElement::Token(_) => None,
+        }
+    }
+
+    pub fn as_token(&self) -> Option<SyntaxToken<'a>> {
+        match self {
+            SyntaxElement::Node(_) => None,
+            SyntaxElement::Token(token) => Some(*token),
+        }
+    }
+
+    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<'a>> {
+        match self {
+            SyntaxElement::Node(it) => it.next_sibling_or_token(),
+            SyntaxElement::Token(it) => it.next_sibling_or_token(),
+        }
+    }
+
+    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<'a>> {
+        match self {
+            SyntaxElement::Node(it) => it.prev_sibling_or_token(),
+            SyntaxElement::Token(it) => it.prev_sibling_or_token(),
+        }
+    }
+
+    pub fn ancestors(&self) -> impl Iterator<Item = &'a SyntaxNode> {
+        match self {
+            SyntaxElement::Node(it) => it,
+            SyntaxElement::Token(it) => it.parent(),
+        }
+        .ancestors()
+    }
+}
+
+impl<'a> From<rowan::SyntaxElement<'a, RaTypes>> for SyntaxElement<'a> {
+    fn from(el: rowan::SyntaxElement<'a, RaTypes>) -> Self {
+        match el {
+            rowan::SyntaxElement::Node(n) => SyntaxElement::Node(SyntaxNode::from_repr(n)),
+            rowan::SyntaxElement::Token(t) => SyntaxElement::Token(t.into()),
+        }
+    }
+}
+
+impl<'a> From<&'a SyntaxNode> for SyntaxElement<'a> {
+    fn from(node: &'a SyntaxNode) -> SyntaxElement<'a> {
+        SyntaxElement::Node(node)
+    }
+}
+
+impl<'a> From<SyntaxToken<'a>> for SyntaxElement<'a> {
+    fn from(token: SyntaxToken<'a>) -> SyntaxElement<'a> {
+        SyntaxElement::Token(token)
+    }
+}
+
+impl<'a> SyntaxElement<'a> {
+    pub fn range(&self) -> TextRange {
+        match self {
+            SyntaxElement::Node(it) => it.range(),
+            SyntaxElement::Token(it) => it.range(),
+        }
     }
 }
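SyntaxToken and SyntaxElement carry the weight of the new API: a token is a leaf with a kind, text and range but no children, and an element is either a node or a token. A sketch of a typical use, descending to the token that covers a given offset (assumes this patch; TextRange::contains_inclusive comes from the text_unit crate):

    use ra_syntax::{SyntaxElement, SyntaxNode, SyntaxToken, TextUnit};

    fn token_at_offset(node: &SyntaxNode, offset: TextUnit) -> Option<SyntaxToken> {
        let mut element: SyntaxElement = node.into();
        loop {
            match element {
                SyntaxElement::Token(token) => return Some(token),
                // Descend into whichever child element covers the offset.
                SyntaxElement::Node(node) => {
                    element = node
                        .children_with_tokens()
                        .find(|child| child.range().contains_inclusive(offset))?;
                }
            }
        }
    }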
@@ -270,6 +488,17 @@ impl<'a> Iterator for SyntaxNodeChildren<'a> {
     }
 }
 
+#[derive(Debug)]
+pub struct SyntaxElementChildren<'a>(rowan::SyntaxElementChildren<'a, RaTypes>);
+
+impl<'a> Iterator for SyntaxElementChildren<'a> {
+    type Item = SyntaxElement<'a>;
+
+    fn next(&mut self) -> Option<SyntaxElement<'a>> {
+        self.0.next().map(SyntaxElement::from)
+    }
+}
+
 fn has_short_text(kind: SyntaxKind) -> bool {
     use crate::SyntaxKind::*;
     match kind {
@@ -304,16 +533,16 @@ impl SyntaxTreeBuilder {
         node
     }
 
-    pub fn leaf(&mut self, kind: SyntaxKind, text: SmolStr) {
-        self.inner.leaf(kind, text)
+    pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {
+        self.inner.token(kind, text)
     }
 
-    pub fn start_branch(&mut self, kind: SyntaxKind) {
-        self.inner.start_internal(kind)
+    pub fn start_node(&mut self, kind: SyntaxKind) {
+        self.inner.start_node(kind)
     }
 
-    pub fn finish_branch(&mut self) {
-        self.inner.finish_internal()
+    pub fn finish_node(&mut self) {
+        self.inner.finish_node()
     }
 
     pub fn error(&mut self, error: ParseError, text_pos: TextUnit) {
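SyntaxTreeBuilder gets the same renames as the sink: token, start_node and finish_node. A sketch of driving it by hand (assumes this patch; constructing the builder via Default and the final finish() call follow the surrounding file rather than these hunks):

    use ra_syntax::{SmolStr, SyntaxKind, SyntaxTreeBuilder};

    fn build_whitespace_only_tree() {
        let mut builder = SyntaxTreeBuilder::default();
        builder.start_node(SyntaxKind::SOURCE_FILE); // was: start_branch
        builder.token(SyntaxKind::WHITESPACE, SmolStr::new(" ")); // was: leaf
        builder.finish_node(); // was: finish_branch
        let _tree = builder.finish();
    }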

@@ -1,6 +1,6 @@
 use std::{fmt, ops};
 
-use crate::{SyntaxNode, TextRange, TextUnit};
+use crate::{SyntaxNode, TextRange, TextUnit, SyntaxElement};
 
 #[derive(Clone)]
 pub struct SyntaxText<'a> {
@@ -15,11 +15,14 @@ impl<'a> SyntaxText<'a> {
 
     pub fn chunks(&self) -> impl Iterator<Item = &'a str> {
         let range = self.range;
-        self.node.descendants().filter_map(move |node| {
-            let text = node.leaf_text()?;
-            let range = range.intersection(&node.range())?;
-            let range = range - node.range().start();
-            Some(&text[range])
+        self.node.descendants_with_tokens().filter_map(move |el| match el {
+            SyntaxElement::Token(t) => {
+                let text = t.text();
+                let range = range.intersection(&t.range())?;
+                let range = range - t.range().start();
+                Some(&text[range])
+            }
+            SyntaxElement::Node(_) => None,
         })
     }
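With leaf_text gone, SyntaxText::chunks walks descendants_with_tokens and yields the in-range slice of every token it meets; nodes contribute nothing directly. Callers are unaffected: collecting the chunks still reassembles the text (a sketch assuming this patch):

    use ra_syntax::{AstNode, SourceFile};

    fn roundtrip(text: &str) -> String {
        let file = SourceFile::parse(text);
        // Concatenating every chunk of the root's SyntaxText reproduces
        // the original input, token by token.
        file.syntax().text().chunks().collect()
    }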

@@ -6,7 +6,7 @@ mod block;
 
 use crate::{
     SourceFile, SyntaxError, AstNode, SyntaxNode,
-    SyntaxKind::{L_CURLY, R_CURLY},
+    SyntaxKind::{L_CURLY, R_CURLY, BYTE, BYTE_STRING, STRING, CHAR},
     ast,
     algo::visit::{visitor_ctx, VisitorCtx},
 };
@@ -15,16 +15,24 @@ pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> {
     let mut errors = Vec::new();
     for node in file.syntax().descendants() {
         let _ = visitor_ctx(&mut errors)
-            .visit::<ast::Byte, _>(byte::validate_byte_node)
-            .visit::<ast::ByteString, _>(byte_string::validate_byte_string_node)
-            .visit::<ast::Char, _>(char::validate_char_node)
-            .visit::<ast::String, _>(string::validate_string_node)
+            .visit::<ast::Literal, _>(validate_literal)
             .visit::<ast::Block, _>(block::validate_block_node)
             .accept(node);
     }
     errors
 }
 
+// FIXME: kill duplication
+fn validate_literal(literal: &ast::Literal, acc: &mut Vec<SyntaxError>) {
+    match literal.token().kind() {
+        BYTE => byte::validate_byte_node(literal.token(), acc),
+        BYTE_STRING => byte_string::validate_byte_string_node(literal.token(), acc),
+        STRING => string::validate_string_node(literal.token(), acc),
+        CHAR => char::validate_char_node(literal.token(), acc),
+        _ => (),
+    }
+}
+
 pub(crate) fn validate_block_structure(root: &SyntaxNode) {
     let mut stack = Vec::new();
     for node in root.descendants() {
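Literal validation is now funneled through a single ast::Literal visitor that dispatches on the kind of the literal's token, and the per-kind validators below take the SyntaxToken directly. The same token-kind filtering works from the outside too (a sketch assuming this patch and SourceFile::parse; STRING is one of the kinds imported above):

    use ra_syntax::{AstNode, SourceFile, SyntaxKind};

    fn count_string_literal_tokens(text: &str) -> usize {
        let file = SourceFile::parse(text);
        file.syntax()
            .descendants_with_tokens()
            .filter_map(|element| element.as_token())
            .filter(|token| token.kind() == SyntaxKind::STRING)
            .count()
    }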

@@ -1,17 +1,17 @@
 //! Validation of byte literals
 
 use crate::{
-    ast::{self, AstNode, AstToken},
     string_lexing::{self, StringComponentKind},
     TextRange,
     validation::char,
     SyntaxError,
     SyntaxErrorKind::*,
+    SyntaxToken,
 };
 
-pub(super) fn validate_byte_node(node: &ast::Byte, errors: &mut Vec<SyntaxError>) {
+pub(super) fn validate_byte_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) {
     let literal_text = node.text();
-    let literal_range = node.syntax().range();
+    let literal_range = node.range();
     let mut components = string_lexing::parse_byte_literal(literal_text);
     let mut len = 0;
     for component in &mut components {

@@ -1,15 +1,15 @@
 use crate::{
-    ast::{self, AstNode, AstToken},
     string_lexing::{self, StringComponentKind},
     SyntaxError,
     SyntaxErrorKind::*,
+    SyntaxToken,
 };
 
 use super::byte;
 
-pub(crate) fn validate_byte_string_node(node: &ast::ByteString, errors: &mut Vec<SyntaxError>) {
+pub(crate) fn validate_byte_string_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) {
     let literal_text = node.text();
-    let literal_range = node.syntax().range();
+    let literal_range = node.range();
     let mut components = string_lexing::parse_byte_string_literal(literal_text);
     for component in &mut components {
         let range = component.range + literal_range.start();

@@ -5,16 +5,16 @@ use std::u32;
 use arrayvec::ArrayString;
 
 use crate::{
-    ast::{self, AstNode, AstToken},
     string_lexing::{self, StringComponentKind},
     TextRange,
     SyntaxError,
     SyntaxErrorKind::*,
+    SyntaxToken,
 };
 
-pub(super) fn validate_char_node(node: &ast::Char, errors: &mut Vec<SyntaxError>) {
+pub(super) fn validate_char_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) {
     let literal_text = node.text();
-    let literal_range = node.syntax().range();
+    let literal_range = node.range();
     let mut components = string_lexing::parse_char_literal(literal_text);
     let mut len = 0;
     for component in &mut components {

@@ -1,15 +1,15 @@
 use crate::{
-    ast::{self, AstNode, AstToken},
     string_lexing,
     SyntaxError,
     SyntaxErrorKind::*,
+    SyntaxToken,
 };
 
 use super::char;
 
-pub(crate) fn validate_string_node(node: &ast::String, errors: &mut Vec<SyntaxError>) {
+pub(crate) fn validate_string_node(node: SyntaxToken, errors: &mut Vec<SyntaxError>) {
     let literal_text = node.text();
-    let literal_range = node.syntax().range();
+    let literal_range = node.range();
     let mut components = string_lexing::parse_string_literal(literal_text);
     for component in &mut components {
         let range = component.range + literal_range.start();