mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 05:23:24 +00:00
make ancestors and descendants inherent
commit d323c81d5c (parent dccaa5e45e)
14 changed files with 40 additions and 46 deletions
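The change is mechanical: the free functions `algo::ancestors(node)` and `algo::walk::preorder(node)` become inherent methods on `SyntaxNodeRef`, so call sites read `node.ancestors()` and `node.descendants()` instead of importing helpers from `algo`. A minimal, self-contained sketch of the same shape, using a toy arena-backed node handle rather than rust-analyzer's real `SyntaxNodeRef` (the names and arena layout below are illustrative only), might look like this:

// Toy analogue of the refactor: traversals as inherent methods on a node handle.
#[derive(Copy, Clone)]
struct NodeRef<'a> {
    arena: &'a [NodeData],
    id: usize,
}

struct NodeData {
    parent: Option<usize>,
    children: Vec<usize>,
}

impl<'a> NodeRef<'a> {
    fn parent(self) -> Option<NodeRef<'a>> {
        self.arena[self.id].parent.map(|id| NodeRef { arena: self.arena, id })
    }

    // Inherent equivalent of the old free function `ancestors(node)`:
    // the node itself, then its parent chain up to the root.
    fn ancestors(self) -> impl Iterator<Item = NodeRef<'a>> {
        std::iter::successors(Some(self), |node| node.parent())
    }

    // Inherent equivalent of the old `walk::preorder(node)`:
    // the node and all of its descendants, depth first.
    fn descendants(self) -> impl Iterator<Item = NodeRef<'a>> {
        let mut stack = vec![self];
        std::iter::from_fn(move || {
            let node = stack.pop()?;
            // Push children in reverse so they pop in source order (preorder).
            for &child in node.arena[node.id].children.iter().rev() {
                stack.push(NodeRef { arena: node.arena, id: child });
            }
            Some(node)
        })
    }
}

fn main() {
    // root (0) -> child (1) -> grandchild (2)
    let arena = vec![
        NodeData { parent: None, children: vec![1] },
        NodeData { parent: Some(0), children: vec![2] },
        NodeData { parent: Some(1), children: vec![] },
    ];

    let grandchild = NodeRef { arena: arena.as_slice(), id: 2 };
    // Call sites migrate from `ancestors(grandchild)` to:
    assert_eq!(grandchild.ancestors().count(), 3);

    let root = NodeRef { arena: arena.as_slice(), id: 0 };
    // ...and from `walk::preorder(root)` to:
    assert_eq!(root.descendants().count(), 3);
}

Making the traversals inherent also removes the `algo::{ancestors, walk}` imports from every consumer, which is what most of the hunks below are doing.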
@@ -9,7 +9,6 @@ use ra_syntax::{
         Direction, siblings,
         find_leaf_at_offset,
         find_covering_node,
-        ancestors,
     },
 };
 
@@ -101,8 +100,8 @@ pub fn add_impl<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce() ->
 
 pub fn introduce_variable<'a>(file: &'a File, range: TextRange) -> Option<impl FnOnce() -> LocalEdit + 'a> {
     let node = find_covering_node(file.syntax(), range);
-    let expr = ancestors(node).filter_map(ast::Expr::cast).next()?;
-    let anchor_stmt = ancestors(expr.syntax()).filter_map(ast::Stmt::cast).next()?;
+    let expr = node.ancestors().filter_map(ast::Expr::cast).next()?;
+    let anchor_stmt = expr.syntax().ancestors().filter_map(ast::Stmt::cast).next()?;
     let indent = anchor_stmt.syntax().prev_sibling()?;
     if indent.kind() != WHITESPACE {
         return None;
@@ -4,7 +4,6 @@ use ra_syntax::{
     File, TextUnit, AstNode, SyntaxNodeRef, SyntaxKind::*,
     ast::{self, LoopBodyOwner, ModuleItemOwner},
     algo::{
-        ancestors,
         visit::{visitor, Visitor, visitor_ctx, VisitorCtx},
     },
     text_utils::is_subrange,
@@ -59,7 +58,7 @@ fn complete_name_ref(file: &File, name_ref: ast::NameRef, acc: &mut Vec<Completi
         return;
     }
     let mut visited_fn = false;
-    for node in ancestors(name_ref.syntax()) {
+    for node in name_ref.syntax().ancestors() {
         if let Some(items) = visitor()
             .visit::<ast::Root, _>(|it| Some(it.items()))
             .visit::<ast::Module, _>(|it| Some(it.item_list()?.items()))
@@ -92,7 +91,7 @@ fn complete_name_ref(file: &File, name_ref: ast::NameRef, acc: &mut Vec<Completi
 
 fn param_completions(ctx: SyntaxNodeRef, acc: &mut Vec<CompletionItem>) {
     let mut params = HashMap::new();
-    for node in ancestors(ctx) {
+    for node in ctx.ancestors() {
         let _ = visitor_ctx(&mut params)
             .visit::<ast::Root, _>(process)
             .visit::<ast::ItemList, _>(process)
@@ -123,7 +122,7 @@ fn param_completions(ctx: SyntaxNodeRef, acc: &mut Vec<CompletionItem>) {
 }
 
 fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool {
-    match ancestors(node).filter_map(N::cast).next() {
+    match node.ancestors().filter_map(N::cast).next() {
         None => false,
         Some(n) => n.syntax().range() == node.range(),
     }
@@ -152,7 +151,7 @@ fn complete_expr_keywords(file: &File, fn_def: ast::FnDef, name_ref: ast::NameRe
 }
 
 fn is_in_loop_body(name_ref: ast::NameRef) -> bool {
-    for node in ancestors(name_ref.syntax()) {
+    for node in name_ref.syntax().ancestors() {
         if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
             break;
         }
@@ -171,7 +170,7 @@ fn is_in_loop_body(name_ref: ast::NameRef) -> bool {
 }
 
 fn complete_return(fn_def: ast::FnDef, name_ref: ast::NameRef) -> Option<CompletionItem> {
-    // let is_last_in_block = ancestors(name_ref.syntax()).filter_map(ast::Expr::cast)
+    // let is_last_in_block = name_ref.syntax().ancestors().filter_map(ast::Expr::cast)
     //     .next()
     //     .and_then(|it| it.syntax().parent())
     //     .and_then(ast::Block::cast)
@@ -181,7 +180,7 @@ fn complete_return(fn_def: ast::FnDef, name_ref: ast::NameRef) -> Option<Complet
     //     return None;
     // }
 
-    let is_stmt = match ancestors(name_ref.syntax()).filter_map(ast::ExprStmt::cast).next() {
+    let is_stmt = match name_ref.syntax().ancestors().filter_map(ast::ExprStmt::cast).next() {
         None => false,
         Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range()
     };
@@ -1,7 +1,7 @@
 use ra_syntax::{
     File, TextRange, SyntaxNodeRef, TextUnit,
     SyntaxKind::*,
-    algo::{find_leaf_at_offset, LeafAtOffset, find_covering_node, ancestors, Direction, siblings},
+    algo::{find_leaf_at_offset, LeafAtOffset, find_covering_node, Direction, siblings},
 };
 
 pub fn extend_selection(file: &File, range: TextRange) -> Option<TextRange> {
@@ -30,7 +30,7 @@ pub(crate) fn extend(root: SyntaxNodeRef, range: TextRange) -> Option<TextRange>
         }
     }
 
-    match ancestors(node).skip_while(|n| n.range() == range).next() {
+    match node.ancestors().skip_while(|n| n.range() == range).next() {
         None => None,
         Some(parent) => Some(parent.range()),
     }
@@ -3,7 +3,7 @@ use std::collections::HashSet;
 use ra_syntax::{
     File, TextRange, SyntaxNodeRef,
     SyntaxKind,
-    algo::{walk, Direction, siblings},
+    algo::{Direction, siblings},
 };
 
 #[derive(Debug, PartialEq, Eq)]
@@ -19,12 +19,10 @@ pub struct Fold {
 }
 
 pub fn folding_ranges(file: &File) -> Vec<Fold> {
-    let syntax = file.syntax();
-
     let mut res = vec![];
     let mut visited = HashSet::new();
 
-    for node in walk::preorder(syntax) {
+    for node in file.syntax().descendants() {
         if visited.contains(&node) {
             continue;
         }
@@ -21,7 +21,7 @@ mod test_utils;
 use ra_syntax::{
     File, TextUnit, TextRange, SyntaxNodeRef,
     ast::{self, AstNode, NameOwner},
-    algo::{walk, find_leaf_at_offset, ancestors},
+    algo::find_leaf_at_offset,
     SyntaxKind::{self, *},
 };
 pub use ra_syntax::AtomEdit;
@@ -86,7 +86,7 @@ pub fn matching_brace(file: &File, offset: TextUnit) -> Option<TextUnit> {
 
 pub fn highlight(file: &File) -> Vec<HighlightedRange> {
     let mut res = Vec::new();
-    for node in walk::preorder(file.syntax()) {
+    for node in file.syntax().descendants() {
         let tag = match node.kind() {
             ERROR => "error",
             COMMENT | DOC_COMMENT => "comment",
@@ -110,7 +110,7 @@ pub fn highlight(file: &File) -> Vec<HighlightedRange> {
 pub fn diagnostics(file: &File) -> Vec<Diagnostic> {
     let mut res = Vec::new();
 
-    for node in walk::preorder(file.syntax()) {
+    for node in file.syntax().descendants() {
         if node.kind() == ERROR {
             res.push(Diagnostic {
                 range: node.range(),
@@ -130,7 +130,7 @@ pub fn syntax_tree(file: &File) -> String {
 }
 
 pub fn runnables(file: &File) -> Vec<Runnable> {
-    walk::preorder(file.syntax())
+    file.syntax().descendants()
         .filter_map(ast::FnDef::cast)
         .filter_map(|f| {
             let name = f.name()?.text();
@@ -159,7 +159,7 @@ pub fn find_node_at_offset<'a, N: AstNode<'a>>(
     let leaf = leaves.clone()
         .find(|leaf| !leaf.kind().is_trivia())
         .or_else(|| leaves.right_biased())?;
-    ancestors(leaf)
+    leaf.ancestors()
         .filter_map(N::cast)
         .next()
 }
@@ -6,7 +6,7 @@ use std::{
 use ra_syntax::{
     SyntaxNodeRef, SyntaxNode, SmolStr, AstNode,
     ast::{self, NameOwner, LoopBodyOwner, ArgListOwner},
-    algo::{ancestors, generate, walk::preorder}
+    algo::{generate}
 };
 
 type ScopeId = usize;
@@ -51,7 +51,7 @@ impl FnScopes {
         res
     }
     fn add_bindings(&mut self, scope: ScopeId, pat: ast::Pat) {
-        let entries = preorder(pat.syntax())
+        let entries = pat.syntax().descendants()
             .filter_map(ast::BindPat::cast)
             .filter_map(ScopeEntry::new);
         self.scopes[scope].entries.extend(entries);
@@ -66,7 +66,7 @@ impl FnScopes {
         self.scope_for.insert(node.owned(), scope);
     }
     fn scope_for(&self, node: SyntaxNodeRef) -> Option<ScopeId> {
-        ancestors(node)
+        node.ancestors()
             .filter_map(|it| self.scope_for.get(&it.owned()).map(|&scope| scope))
             .next()
     }
@@ -3,7 +3,7 @@ use ra_syntax::{
     ast::{self, NameOwner},
     algo::{
         visit::{visitor, Visitor},
-        walk::{walk, WalkEvent, preorder},
+        walk::{walk, WalkEvent},
     },
 };
 use TextRange;
@@ -25,7 +25,7 @@ pub struct FileSymbol {
 }
 
 pub fn file_symbols(file: &File) -> Vec<FileSymbol> {
-    preorder(file.syntax())
+    file.syntax().descendants()
         .filter_map(to_symbol)
         .collect()
 }
@@ -4,7 +4,6 @@ use ra_syntax::{
     TextUnit, TextRange, SyntaxNodeRef, File, AstNode, SyntaxKind,
     ast,
     algo::{
-        walk::preorder,
         find_covering_node,
     },
     text_utils::{intersect, contains_offset_nonstrict},
@@ -33,7 +32,7 @@ pub fn join_lines(file: &File, range: TextRange) -> LocalEdit {
     };
     let node = find_covering_node(file.syntax(), range);
     let mut edit = EditBuilder::new();
-    for node in preorder(node) {
+    for node in node.descendants() {
         let text = match node.leaf_text() {
             Some(text) => text,
             None => continue,
@@ -94,10 +94,6 @@ pub fn find_covering_node(root: SyntaxNodeRef, range: TextRange) -> SyntaxNodeRe
     common_ancestor(left, right)
 }
 
-pub fn ancestors<'a>(node: SyntaxNodeRef<'a>) -> impl Iterator<Item=SyntaxNodeRef<'a>> {
-    generate(Some(node), |&node| node.parent())
-}
-
 #[derive(Debug)]
 pub enum Direction {
     Forward,
@@ -115,8 +111,8 @@ pub fn siblings<'a>(
 }
 
 fn common_ancestor<'a>(n1: SyntaxNodeRef<'a>, n2: SyntaxNodeRef<'a>) -> SyntaxNodeRef<'a> {
-    for p in ancestors(n1) {
-        if ancestors(n2).any(|a| a == p) {
+    for p in n1.ancestors() {
+        if n2.ancestors().any(|a| a == p) {
             return p;
         }
     }
@@ -3,12 +3,6 @@ use {
     algo::generate,
 };
 
-pub fn preorder<'a>(root: SyntaxNodeRef<'a>) -> impl Iterator<Item = SyntaxNodeRef<'a>> {
-    walk(root).filter_map(|event| match event {
-        WalkEvent::Enter(node) => Some(node),
-        WalkEvent::Exit(_) => None,
-    })
-}
 
 #[derive(Debug, Copy, Clone)]
 pub enum WalkEvent<'a> {
@@ -112,7 +112,7 @@ fn find_reparsable_node<'node>(
     range: TextRange,
 ) -> Option<(SyntaxNodeRef<'node>, fn(&mut Parser))> {
     let node = algo::find_covering_node(node, range);
-    return algo::ancestors(node)
+    return node.ancestors()
         .filter_map(|node| reparser(node).map(|r| (node, r)))
         .next();
 
@@ -1,6 +1,6 @@
 use std::fmt::Write;
 use {
-    algo::walk::{preorder, walk, WalkEvent},
+    algo::walk::{walk, WalkEvent},
     SyntaxKind, File, SyntaxNodeRef
 };
 
@@ -56,7 +56,7 @@ pub fn check_fuzz_invariants(text: &str) {
 
 pub(crate) fn validate_block_structure(root: SyntaxNodeRef) {
     let mut stack = Vec::new();
-    for node in preorder(root) {
+    for node in root.descendants() {
         match node.kind() {
             SyntaxKind::L_CURLY => {
                 stack.push(node)
@@ -62,6 +62,15 @@ impl<'a> SyntaxNodeRef<'a> {
     pub fn leaf_text(self) -> Option<&'a SmolStr> {
         self.0.leaf_text()
     }
+    pub fn ancestors(self) -> impl Iterator<Item=SyntaxNodeRef<'a>> {
+        ::algo::generate(Some(self), |&node| node.parent())
+    }
+    pub fn descendants(self) -> impl Iterator<Item=SyntaxNodeRef<'a>> {
+        ::algo::walk::walk(self).filter_map(|event| match event {
+            ::algo::walk::WalkEvent::Enter(node) => Some(node),
+            ::algo::walk::WalkEvent::Exit(_) => None,
+        })
+    }
 }
 
 impl<R: TreeRoot<RaTypes>> SyntaxNode<R> {
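For context on the hunk above: the new inherent `ancestors` delegates to the crate's `algo::generate` helper, whose implementation is not part of this diff. A rough sketch of the assumed behaviour, written against the standard library's `std::iter::successors`, is:

// Assumed shape of a `generate`-style helper (the real rust-analyzer code may
// differ): yield the seed, then keep applying `step` until it returns None.
fn generate<T, F>(seed: Option<T>, step: F) -> impl Iterator<Item = T>
where
    F: FnMut(&T) -> Option<T>,
{
    std::iter::successors(seed, step)
}

Under that assumption, `generate(Some(self), |&node| node.parent())` yields the node itself, then its parent, grandparent, and so on until `parent()` returns `None`, which is the iteration the call sites in the other hunks rely on.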
@@ -4,7 +4,6 @@ use std::{
 
 use {
     SyntaxNodeRef, TextRange, TextUnit,
-    algo::walk::preorder,
     text_utils::{intersect, contains_offset_nonstrict},
 };
 
@@ -23,7 +22,8 @@ impl<'a> SyntaxText<'a> {
     }
     pub fn chunks(&self) -> impl Iterator<Item=&'a str> {
         let range = self.range;
-        preorder(self.node)
+        self.node
+            .descendants()
             .filter_map(move |node| {
                 let text = node.leaf_text()?;
                 let range = intersect(range, node.range())?;