Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 13:03:31 +00:00
Commit d847d53e36: Start simplifying editing API
Parent: 1a4b424005
13 changed files with 114 additions and 87 deletions
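In short: the mutable AstEditor wrapper starts to give way to immutable edit methods on the AST nodes themselves. A minimal before/after sketch, assembled from the hunks below (fn_def, body and the named calls all appear in this diff):

    // Before: mutate a clone through AstEditor, then extract the result.
    let mut ast_editor = AstEditor::new(fn_def.clone());
    ast_editor.set_body(&body);
    let new_fn = ast_editor.ast().to_owned();

    // After: the node returns a fresh, edited copy of itself.
    let new_fn = fn_def.with_body(body);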
Cargo.lock (generated): 12 changes

@@ -761,7 +761,7 @@ dependencies = [
 
 [[package]]
 name = "once_cell"
-version = "1.0.2"
+version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]

@@ -897,7 +897,7 @@ dependencies = [
 "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"once_cell 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "ra_db 0.1.0",
 "ra_fmt 0.1.0",
 "ra_hir 0.1.0",

@@ -968,7 +968,7 @@ dependencies = [
 "insta 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
-"once_cell 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "ra_arena 0.1.0",
 "ra_db 0.1.0",

@@ -1065,7 +1065,7 @@ dependencies = [
 "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "jemalloc-ctl 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "jemallocator 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"once_cell 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -1085,7 +1085,9 @@ dependencies = [
 name = "ra_syntax"
 version = "0.1.0"
 dependencies = [
+"arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
 "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "ra_parser 0.1.0",
 "ra_text_edit 0.1.0",
 "rowan 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1854,7 +1856,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "6ba9a427cfca2be13aa6f6403b0b7e7368fe982bfa16fccc450ce74c46cd9b32"
 "checksum num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bcef43580c035376c0705c42792c294b66974abbfd2789b511784023f71f3273"
 "checksum number_prefix 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "dbf9993e59c894e3c08aa1c2712914e9e6bf1fcbfc6bef283e2183df345a4fee"
-"checksum once_cell 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bd38c1bb51148ca239ec38ef1bb4f7570d432861f03e91774d53b01c2ba2132f"
+"checksum once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "891f486f630e5c5a4916c7e16c4b24a53e78c860b646e9f8e005e4f16847bfed"
 "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
 "checksum parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252"
 "checksum parking_lot_core 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b876b1b9e7ac6e1a74a6da34d25c42e17e8862aa409cbbbdcfc8d86c6f3bc62b"

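The dependency changes boil down to bumping once_cell to 1.2.0 and adding arrayvec and once_cell to ra_syntax; once_cell is used for the lazily initialized static parse in make::tokens further down. A minimal, self-contained sketch of that once_cell::sync::Lazy pattern (the GREETING static is purely illustrative, not from this diff):

    use once_cell::sync::Lazy;

    // The closure runs once, on first access; later reads reuse the cached value.
    static GREETING: Lazy<String> = Lazy::new(|| "hello".to_string());

    fn main() {
        assert_eq!(GREETING.as_str(), "hello");
    }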
@@ -100,12 +100,11 @@ fn strip_docstring(item: ast::ImplItem) -> ast::ImplItem {
 }
 
 fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
-    let mut ast_editor = AstEditor::new(fn_def.clone());
     if fn_def.body().is_none() {
-        let body = make::block_from_expr(make::expr_unimplemented());
-        ast_editor.set_body(&body);
+        fn_def.with_body(make::block_from_expr(make::expr_unimplemented()))
+    } else {
+        fn_def
     }
-    ast_editor.ast().to_owned()
 }
 
 /// Given an `ast::ImplBlock`, resolves the target trait (the one being
@@ -332,5 +331,4 @@ impl Foo for S {
 }",
         )
     }
-
 }

@@ -448,7 +448,6 @@ fn make_assist_add_in_tree_list(
         fmt_segments_raw(target, &mut buf);
         edit.insert(offset, buf);
     } else {
 
     }
 }
-

@@ -6,7 +6,7 @@ use rustc_hash::FxHashMap;
 use ra_fmt::leading_indent;
 use ra_syntax::{
     algo,
-    ast::{self, TypeBoundsOwner},
+    ast::{self, make::tokens, TypeBoundsOwner},
     AstNode, Direction, InsertPosition, SyntaxElement,
     SyntaxKind::*,
     T,
@@ -229,26 +229,6 @@ impl AstEditor<ast::ImplItem> {
     }
 }
 
-impl AstEditor<ast::FnDef> {
-    pub fn set_body(&mut self, body: &ast::Block) {
-        let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
-        let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.ast().body() {
-            old_body.syntax().clone().into()
-        } else if let Some(semi) = self.ast().semicolon_token() {
-            to_insert.push(tokens::single_space().into());
-            semi.into()
-        } else {
-            to_insert.push(tokens::single_space().into());
-            to_insert.push(body.syntax().clone().into());
-            self.ast = self.insert_children(InsertPosition::Last, to_insert.into_iter());
-            return;
-        };
-        to_insert.push(body.syntax().clone().into());
-        let replace_range = RangeInclusive::new(old_body_or_semi.clone(), old_body_or_semi);
-        self.ast = self.replace_children(replace_range, to_insert.into_iter())
-    }
-}
-
 impl AstEditor<ast::TypeParam> {
     pub fn remove_bounds(&mut self) -> &mut Self {
         let colon = match self.ast.colon_token() {
@@ -263,53 +243,3 @@ impl AstEditor<ast::TypeParam> {
         self
     }
 }
-
-mod tokens {
-    use once_cell::sync::Lazy;
-    use ra_syntax::{AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken, T};
-
-    static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;"));
-
-    pub(crate) fn comma() -> SyntaxToken {
-        SOURCE_FILE
-            .tree()
-            .syntax()
-            .descendants_with_tokens()
-            .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == T![,])
-            .unwrap()
-    }
-
-    pub(crate) fn single_space() -> SyntaxToken {
-        SOURCE_FILE
-            .tree()
-            .syntax()
-            .descendants_with_tokens()
-            .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
-            .unwrap()
-    }
-
-    #[allow(unused)]
-    pub(crate) fn single_newline() -> SyntaxToken {
-        SOURCE_FILE
-            .tree()
-            .syntax()
-            .descendants_with_tokens()
-            .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
-            .unwrap()
-    }
-
-    pub(crate) struct WsBuilder(SourceFile);
-
-    impl WsBuilder {
-        pub(crate) fn new(text: &str) -> WsBuilder {
-            WsBuilder(SourceFile::parse(text).ok().unwrap())
-        }
-        pub(crate) fn ws(&self) -> SyntaxToken {
-            self.0.syntax().first_child_or_token().unwrap().into_token().unwrap()
-        }
-    }
-
-}

@@ -340,5 +340,4 @@ mod tests {
         assert_eq!(assists.next().expect("expected assist").0.label, "introduce variable");
         assert_eq!(assists.next().expect("expected assist").0.label, "replace with match");
     }
-
 }

@@ -278,5 +278,4 @@ const C: char = \"メ メ\";
 
         assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15));
     }
-
 }

@@ -229,5 +229,4 @@ mod tests {
         let runnables = analysis.runnables(pos.file_id).unwrap();
         assert!(runnables.is_empty())
     }
-
 }

@@ -70,5 +70,4 @@ let a = 1;
             "```rust\nfn main(){}\n```\nSome comment.\n```rust\nlet a = 1;\n```"
         );
     }
-
 }

@@ -135,7 +135,6 @@ pub(crate) mod fragments {
 
         m.complete(p, MACRO_STMTS);
     }
-
 }
 
 pub(crate) fn reparser(

@@ -12,6 +12,8 @@ itertools = "0.8.0"
 rowan = "0.6.1"
 rustc_lexer = "0.1.0"
 rustc-hash = "1.0.1"
+arrayvec = "0.4.10"
+once_cell = "1.2.0"
 
 # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here
 # to reduce number of compilations

@@ -5,6 +5,7 @@ mod traits;
 mod tokens;
 mod extensions;
 mod expr_extensions;
+mod edit;
 pub mod make;
 
 use std::marker::PhantomData;

crates/ra_syntax/src/ast/edit.rs (new file): 52 additions
@@ -0,0 +1,52 @@
+//! This module contains functions for editing syntax trees. As the trees are
+//! immutable, all function here return a fresh copy of the tree, instead of
+//! doing an in-place modification.
+
+use arrayvec::ArrayVec;
+use std::ops::RangeInclusive;
+
+use crate::{
+    algo,
+    ast::{self, make, AstNode},
+    InsertPosition, SyntaxElement,
+};
+
+impl ast::FnDef {
+    #[must_use]
+    pub fn with_body(&self, body: ast::Block) -> ast::FnDef {
+        let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
+        let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() {
+            old_body.syntax().clone().into()
+        } else if let Some(semi) = self.semicolon_token() {
+            to_insert.push(make::tokens::single_space().into());
+            semi.into()
+        } else {
+            to_insert.push(make::tokens::single_space().into());
+            to_insert.push(body.syntax().clone().into());
+            return insert_children(self, InsertPosition::Last, to_insert.into_iter());
+        };
+        to_insert.push(body.syntax().clone().into());
+        let replace_range = RangeInclusive::new(old_body_or_semi.clone(), old_body_or_semi);
+        replace_children(self, replace_range, to_insert.into_iter())
+    }
+}
+
+#[must_use]
+fn insert_children<N: AstNode>(
+    parent: &N,
+    position: InsertPosition<SyntaxElement>,
+    mut to_insert: impl Iterator<Item = SyntaxElement>,
+) -> N {
+    let new_syntax = algo::insert_children(parent.syntax(), position, &mut to_insert);
+    N::cast(new_syntax).unwrap()
+}
+
+#[must_use]
+fn replace_children<N: AstNode>(
+    parent: &N,
+    to_replace: RangeInclusive<SyntaxElement>,
+    mut to_insert: impl Iterator<Item = SyntaxElement>,
+) -> N {
+    let new_syntax = algo::replace_children(parent.syntax(), to_replace, &mut to_insert);
+    N::cast(new_syntax).unwrap()
+}

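A sketch of how the new immutable API might be exercised, using only names that appear in this diff (fn demo and the bodiless `fn foo();` input are illustrative; the assumption is that the error-tolerant parser still produces an ast::FnDef node for it):

    use ra_syntax::{ast, ast::make, AstNode, SourceFile};

    fn demo() {
        // Parse a bodiless function and give it a placeholder body.
        let parse = SourceFile::parse("fn foo();");
        let file = parse.tree();
        let fn_def = file.syntax().descendants().find_map(ast::FnDef::cast).unwrap();
        let with_body = fn_def.with_body(make::block_from_expr(make::expr_unimplemented()));
        // `fn_def` is untouched; `with_body` is a fresh copy that now has a body.
        assert!(fn_def.body().is_none());
        assert!(with_body.body().is_some());
    }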
@@ -133,3 +133,51 @@ fn ast_from_text<N: AstNode>(text: &str) -> N {
     let res = parse.tree().syntax().descendants().find_map(N::cast).unwrap();
     res
 }
+
+pub mod tokens {
+    use crate::{AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken, T};
+    use once_cell::sync::Lazy;
+
+    static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;"));
+
+    pub fn comma() -> SyntaxToken {
+        SOURCE_FILE
+            .tree()
+            .syntax()
+            .descendants_with_tokens()
+            .filter_map(|it| it.into_token())
+            .find(|it| it.kind() == T![,])
+            .unwrap()
+    }
+
+    pub fn single_space() -> SyntaxToken {
+        SOURCE_FILE
+            .tree()
+            .syntax()
+            .descendants_with_tokens()
+            .filter_map(|it| it.into_token())
+            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
+            .unwrap()
+    }
+
+    pub fn single_newline() -> SyntaxToken {
+        SOURCE_FILE
+            .tree()
+            .syntax()
+            .descendants_with_tokens()
+            .filter_map(|it| it.into_token())
+            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
+            .unwrap()
+    }
+
+    pub struct WsBuilder(SourceFile);
+
+    impl WsBuilder {
+        pub fn new(text: &str) -> WsBuilder {
+            WsBuilder(SourceFile::parse(text).ok().unwrap())
+        }
+        pub fn ws(&self) -> SyntaxToken {
+            self.0.syntax().first_child_or_token().unwrap().into_token().unwrap()
+        }
+    }
+}

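A small sketch of what these now-public helpers return, based only on the code above (fn demo is illustrative; the token text follows directly from the find predicates):

    use ra_syntax::ast::make::tokens;

    fn demo() {
        // Both helpers hand out tokens cut from a tiny, lazily parsed source file.
        let space = tokens::single_space();
        assert_eq!(space.text().as_str(), " ");

        let comma = tokens::comma();
        assert_eq!(comma.text().as_str(), ",");
    }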