4494: Support snippet text edit r=matklad a=matklad



bors r+
🤖

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2020-05-19 18:29:46 +00:00 committed by GitHub
commit 1bc1f28bc5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
30 changed files with 542 additions and 251 deletions

View file

@ -0,0 +1,27 @@
//! Settings for tweaking assists.
//!
//! The interesting piece here is `SnippetCap`: its only field is private, so
//! values can be constructed exclusively inside this module. Holding a
//! `SnippetCap` therefore acts as a static, compile-time proof that producing
//! snippet assists is permitted.

/// Configuration consulted when computing assists.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AssistConfig {
    /// `Some` exactly when the client supports snippet text edits.
    pub snippet_cap: Option<SnippetCap>,
}

impl AssistConfig {
    /// Grants or revokes the capability to emit snippet edits.
    pub fn allow_snippets(&mut self, yes: bool) {
        self.snippet_cap = match yes {
            true => Some(SnippetCap { _private: () }),
            false => None,
        };
    }
}

/// Capability token: owning one proves snippet assists are allowed.
/// Constructible only within this module (private unit field).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct SnippetCap {
    _private: (),
}

impl Default for AssistConfig {
    /// Snippet support is enabled by default.
    fn default() -> Self {
        AssistConfig { snippet_cap: Some(SnippetCap { _private: () }) }
    }
}

View file

@ -15,7 +15,10 @@ use ra_syntax::{
}; };
use ra_text_edit::TextEditBuilder; use ra_text_edit::TextEditBuilder;
use crate::{Assist, AssistId, GroupLabel, ResolvedAssist}; use crate::{
assist_config::{AssistConfig, SnippetCap},
Assist, AssistId, GroupLabel, ResolvedAssist,
};
/// `AssistContext` allows to apply an assist or check if it could be applied. /// `AssistContext` allows to apply an assist or check if it could be applied.
/// ///
@ -48,6 +51,7 @@ use crate::{Assist, AssistId, GroupLabel, ResolvedAssist};
/// moment, because the LSP API is pretty awkward in this place, and it's much /// moment, because the LSP API is pretty awkward in this place, and it's much
/// easier to just compute the edit eagerly :-) /// easier to just compute the edit eagerly :-)
pub(crate) struct AssistContext<'a> { pub(crate) struct AssistContext<'a> {
pub(crate) config: &'a AssistConfig,
pub(crate) sema: Semantics<'a, RootDatabase>, pub(crate) sema: Semantics<'a, RootDatabase>,
pub(crate) db: &'a RootDatabase, pub(crate) db: &'a RootDatabase,
pub(crate) frange: FileRange, pub(crate) frange: FileRange,
@ -55,10 +59,14 @@ pub(crate) struct AssistContext<'a> {
} }
impl<'a> AssistContext<'a> { impl<'a> AssistContext<'a> {
pub fn new(sema: Semantics<'a, RootDatabase>, frange: FileRange) -> AssistContext<'a> { pub(crate) fn new(
sema: Semantics<'a, RootDatabase>,
config: &'a AssistConfig,
frange: FileRange,
) -> AssistContext<'a> {
let source_file = sema.parse(frange.file_id); let source_file = sema.parse(frange.file_id);
let db = sema.db; let db = sema.db;
AssistContext { sema, db, frange, source_file } AssistContext { config, sema, db, frange, source_file }
} }
// NB, this ignores active selection. // NB, this ignores active selection.
@ -165,11 +173,17 @@ pub(crate) struct AssistBuilder {
edit: TextEditBuilder, edit: TextEditBuilder,
cursor_position: Option<TextSize>, cursor_position: Option<TextSize>,
file: FileId, file: FileId,
is_snippet: bool,
} }
impl AssistBuilder { impl AssistBuilder {
pub(crate) fn new(file: FileId) -> AssistBuilder { pub(crate) fn new(file: FileId) -> AssistBuilder {
AssistBuilder { edit: TextEditBuilder::default(), cursor_position: None, file } AssistBuilder {
edit: TextEditBuilder::default(),
cursor_position: None,
file,
is_snippet: false,
}
} }
/// Remove specified `range` of text. /// Remove specified `range` of text.
@ -180,6 +194,16 @@ impl AssistBuilder {
pub(crate) fn insert(&mut self, offset: TextSize, text: impl Into<String>) { pub(crate) fn insert(&mut self, offset: TextSize, text: impl Into<String>) {
self.edit.insert(offset, text.into()) self.edit.insert(offset, text.into())
} }
/// Append specified `text` at the given `offset`
pub(crate) fn insert_snippet(
&mut self,
_cap: SnippetCap,
offset: TextSize,
text: impl Into<String>,
) {
self.is_snippet = true;
self.edit.insert(offset, text.into())
}
/// Replaces specified `range` of text with a given string. /// Replaces specified `range` of text with a given string.
pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) { pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) {
self.edit.replace(range, replace_with.into()) self.edit.replace(range, replace_with.into())
@ -227,7 +251,12 @@ impl AssistBuilder {
if edit.is_empty() && self.cursor_position.is_none() { if edit.is_empty() && self.cursor_position.is_none() {
panic!("Only call `add_assist` if the assist can be applied") panic!("Only call `add_assist` if the assist can be applied")
} }
SingleFileChange { label: change_label, edit, cursor_position: self.cursor_position } let mut res =
.into_source_change(self.file) SingleFileChange { label: change_label, edit, cursor_position: self.cursor_position }
.into_source_change(self.file);
if self.is_snippet {
res.is_snippet = true;
}
res
} }
} }

View file

@ -25,7 +25,7 @@ use crate::{
// struct S; // struct S;
// //
// impl Debug for S { // impl Debug for S {
// // $0
// } // }
// ``` // ```
pub(crate) fn add_custom_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { pub(crate) fn add_custom_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
@ -52,7 +52,7 @@ pub(crate) fn add_custom_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<
format!("Add custom impl `{}` for `{}`", trait_token.text().as_str(), annotated_name); format!("Add custom impl `{}` for `{}`", trait_token.text().as_str(), annotated_name);
let target = attr.syntax().text_range(); let target = attr.syntax().text_range();
acc.add(AssistId("add_custom_impl"), label, target, |edit| { acc.add(AssistId("add_custom_impl"), label, target, |builder| {
let new_attr_input = input let new_attr_input = input
.syntax() .syntax()
.descendants_with_tokens() .descendants_with_tokens()
@ -63,20 +63,11 @@ pub(crate) fn add_custom_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<
let has_more_derives = !new_attr_input.is_empty(); let has_more_derives = !new_attr_input.is_empty();
let new_attr_input = new_attr_input.iter().sep_by(", ").surround_with("(", ")").to_string(); let new_attr_input = new_attr_input.iter().sep_by(", ").surround_with("(", ")").to_string();
let mut buf = String::new(); if has_more_derives {
buf.push_str("\n\nimpl "); builder.replace(input.syntax().text_range(), new_attr_input);
buf.push_str(trait_token.text().as_str());
buf.push_str(" for ");
buf.push_str(annotated_name.as_str());
buf.push_str(" {\n");
let cursor_delta = if has_more_derives {
let delta = input.syntax().text_range().len() - TextSize::of(&new_attr_input);
edit.replace(input.syntax().text_range(), new_attr_input);
delta
} else { } else {
let attr_range = attr.syntax().text_range(); let attr_range = attr.syntax().text_range();
edit.delete(attr_range); builder.delete(attr_range);
let line_break_range = attr let line_break_range = attr
.syntax() .syntax()
@ -84,14 +75,24 @@ pub(crate) fn add_custom_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<
.filter(|t| t.kind() == WHITESPACE) .filter(|t| t.kind() == WHITESPACE)
.map(|t| t.text_range()) .map(|t| t.text_range())
.unwrap_or_else(|| TextRange::new(TextSize::from(0), TextSize::from(0))); .unwrap_or_else(|| TextRange::new(TextSize::from(0), TextSize::from(0)));
edit.delete(line_break_range); builder.delete(line_break_range);
}
attr_range.len() + line_break_range.len() match ctx.config.snippet_cap {
}; Some(cap) => {
builder.insert_snippet(
edit.set_cursor(start_offset + TextSize::of(&buf) - cursor_delta); cap,
buf.push_str("\n}"); start_offset,
edit.insert(start_offset, buf); format!("\n\nimpl {} for {} {{\n $0\n}}", trait_token, annotated_name),
);
}
None => {
builder.insert(
start_offset,
format!("\n\nimpl {} for {} {{\n\n}}", trait_token, annotated_name),
);
}
}
}) })
} }
@ -117,7 +118,7 @@ struct Foo {
} }
impl Debug for Foo { impl Debug for Foo {
<|> $0
} }
", ",
) )
@ -139,7 +140,7 @@ pub struct Foo {
} }
impl Debug for Foo { impl Debug for Foo {
<|> $0
} }
", ",
) )
@ -158,7 +159,7 @@ struct Foo {}
struct Foo {} struct Foo {}
impl Debug for Foo { impl Debug for Foo {
<|> $0
} }
", ",
) )

View file

@ -18,31 +18,37 @@ use crate::{AssistContext, AssistId, Assists};
// ``` // ```
// -> // ->
// ``` // ```
// #[derive()] // #[derive($0)]
// struct Point { // struct Point {
// x: u32, // x: u32,
// y: u32, // y: u32,
// } // }
// ``` // ```
pub(crate) fn add_derive(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { pub(crate) fn add_derive(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let cap = ctx.config.snippet_cap?;
let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?; let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?;
let node_start = derive_insertion_offset(&nominal)?; let node_start = derive_insertion_offset(&nominal)?;
let target = nominal.syntax().text_range(); let target = nominal.syntax().text_range();
acc.add(AssistId("add_derive"), "Add `#[derive]`", target, |edit| { acc.add(AssistId("add_derive"), "Add `#[derive]`", target, |builder| {
let derive_attr = nominal let derive_attr = nominal
.attrs() .attrs()
.filter_map(|x| x.as_simple_call()) .filter_map(|x| x.as_simple_call())
.filter(|(name, _arg)| name == "derive") .filter(|(name, _arg)| name == "derive")
.map(|(_name, arg)| arg) .map(|(_name, arg)| arg)
.next(); .next();
let offset = match derive_attr { match derive_attr {
None => { None => {
edit.insert(node_start, "#[derive()]\n"); builder.insert_snippet(cap, node_start, "#[derive($0)]\n");
node_start + TextSize::of("#[derive(") }
Some(tt) => {
// Just move the cursor.
builder.insert_snippet(
cap,
tt.syntax().text_range().end() - TextSize::of(')'),
"$0",
)
} }
Some(tt) => tt.syntax().text_range().end() - TextSize::of(')'),
}; };
edit.set_cursor(offset)
}) })
} }
@ -66,12 +72,12 @@ mod tests {
check_assist( check_assist(
add_derive, add_derive,
"struct Foo { a: i32, <|>}", "struct Foo { a: i32, <|>}",
"#[derive(<|>)]\nstruct Foo { a: i32, }", "#[derive($0)]\nstruct Foo { a: i32, }",
); );
check_assist( check_assist(
add_derive, add_derive,
"struct Foo { <|> a: i32, }", "struct Foo { <|> a: i32, }",
"#[derive(<|>)]\nstruct Foo { a: i32, }", "#[derive($0)]\nstruct Foo { a: i32, }",
); );
} }
@ -80,7 +86,7 @@ mod tests {
check_assist( check_assist(
add_derive, add_derive,
"#[derive(Clone)]\nstruct Foo { a: i32<|>, }", "#[derive(Clone)]\nstruct Foo { a: i32<|>, }",
"#[derive(Clone<|>)]\nstruct Foo { a: i32, }", "#[derive(Clone$0)]\nstruct Foo { a: i32, }",
); );
} }
@ -96,7 +102,7 @@ struct Foo { a: i32<|>, }
" "
/// `Foo` is a pretty important struct. /// `Foo` is a pretty important struct.
/// It does stuff. /// It does stuff.
#[derive(<|>)] #[derive($0)]
struct Foo { a: i32, } struct Foo { a: i32, }
", ",
); );

View file

@ -1,7 +1,4 @@
use ra_syntax::{ use ra_syntax::ast::{self, AstNode, NameOwner, TypeParamsOwner};
ast::{self, AstNode, NameOwner, TypeParamsOwner},
TextSize,
};
use stdx::{format_to, SepBy}; use stdx::{format_to, SepBy};
use crate::{AssistContext, AssistId, Assists}; use crate::{AssistContext, AssistId, Assists};
@ -12,17 +9,17 @@ use crate::{AssistContext, AssistId, Assists};
// //
// ``` // ```
// struct Ctx<T: Clone> { // struct Ctx<T: Clone> {
// data: T,<|> // data: T,<|>
// } // }
// ``` // ```
// -> // ->
// ``` // ```
// struct Ctx<T: Clone> { // struct Ctx<T: Clone> {
// data: T, // data: T,
// } // }
// //
// impl<T: Clone> Ctx<T> { // impl<T: Clone> Ctx<T> {
// // $0
// } // }
// ``` // ```
pub(crate) fn add_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { pub(crate) fn add_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
@ -50,30 +47,37 @@ pub(crate) fn add_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let generic_params = lifetime_params.chain(type_params).sep_by(", "); let generic_params = lifetime_params.chain(type_params).sep_by(", ");
format_to!(buf, "<{}>", generic_params) format_to!(buf, "<{}>", generic_params)
} }
buf.push_str(" {\n"); match ctx.config.snippet_cap {
edit.set_cursor(start_offset + TextSize::of(&buf)); Some(cap) => {
buf.push_str("\n}"); buf.push_str(" {\n $0\n}");
edit.insert(start_offset, buf); edit.insert_snippet(cap, start_offset, buf);
}
None => {
buf.push_str(" {\n}");
edit.insert(start_offset, buf);
}
}
}) })
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use crate::tests::{check_assist, check_assist_target}; use crate::tests::{check_assist, check_assist_target};
use super::*;
#[test] #[test]
fn test_add_impl() { fn test_add_impl() {
check_assist(add_impl, "struct Foo {<|>}\n", "struct Foo {}\n\nimpl Foo {\n<|>\n}\n"); check_assist(add_impl, "struct Foo {<|>}\n", "struct Foo {}\n\nimpl Foo {\n $0\n}\n");
check_assist( check_assist(
add_impl, add_impl,
"struct Foo<T: Clone> {<|>}", "struct Foo<T: Clone> {<|>}",
"struct Foo<T: Clone> {}\n\nimpl<T: Clone> Foo<T> {\n<|>\n}", "struct Foo<T: Clone> {}\n\nimpl<T: Clone> Foo<T> {\n $0\n}",
); );
check_assist( check_assist(
add_impl, add_impl,
"struct Foo<'a, T: Foo<'a>> {<|>}", "struct Foo<'a, T: Foo<'a>> {<|>}",
"struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n<|>\n}", "struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n $0\n}",
); );
} }

View file

@ -10,6 +10,7 @@ macro_rules! eprintln {
($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
} }
mod assist_config;
mod assist_context; mod assist_context;
mod marks; mod marks;
#[cfg(test)] #[cfg(test)]
@ -24,6 +25,8 @@ use ra_syntax::TextRange;
pub(crate) use crate::assist_context::{AssistContext, Assists}; pub(crate) use crate::assist_context::{AssistContext, Assists};
pub use assist_config::AssistConfig;
/// Unique identifier of the assist, should not be shown to the user /// Unique identifier of the assist, should not be shown to the user
/// directly. /// directly.
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@ -54,9 +57,9 @@ impl Assist {
/// ///
/// Assists are returned in the "unresolved" state, that is only labels are /// Assists are returned in the "unresolved" state, that is only labels are
/// returned, without actual edits. /// returned, without actual edits.
pub fn unresolved(db: &RootDatabase, range: FileRange) -> Vec<Assist> { pub fn unresolved(db: &RootDatabase, config: &AssistConfig, range: FileRange) -> Vec<Assist> {
let sema = Semantics::new(db); let sema = Semantics::new(db);
let ctx = AssistContext::new(sema, range); let ctx = AssistContext::new(sema, config, range);
let mut acc = Assists::new_unresolved(&ctx); let mut acc = Assists::new_unresolved(&ctx);
handlers::all().iter().for_each(|handler| { handlers::all().iter().for_each(|handler| {
handler(&mut acc, &ctx); handler(&mut acc, &ctx);
@ -68,9 +71,13 @@ impl Assist {
/// ///
/// Assists are returned in the "resolved" state, that is with edit fully /// Assists are returned in the "resolved" state, that is with edit fully
/// computed. /// computed.
pub fn resolved(db: &RootDatabase, range: FileRange) -> Vec<ResolvedAssist> { pub fn resolved(
db: &RootDatabase,
config: &AssistConfig,
range: FileRange,
) -> Vec<ResolvedAssist> {
let sema = Semantics::new(db); let sema = Semantics::new(db);
let ctx = AssistContext::new(sema, range); let ctx = AssistContext::new(sema, config, range);
let mut acc = Assists::new_resolved(&ctx); let mut acc = Assists::new_resolved(&ctx);
handlers::all().iter().for_each(|handler| { handlers::all().iter().for_each(|handler| {
handler(&mut acc, &ctx); handler(&mut acc, &ctx);

View file

@ -11,7 +11,7 @@ use test_utils::{
RangeOrOffset, RangeOrOffset,
}; };
use crate::{handlers::Handler, Assist, AssistContext, Assists}; use crate::{handlers::Handler, Assist, AssistConfig, AssistContext, Assists};
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
let (mut db, file_id) = RootDatabase::with_single_file(text); let (mut db, file_id) = RootDatabase::with_single_file(text);
@ -41,14 +41,14 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) {
let (db, file_id) = crate::tests::with_single_file(&before); let (db, file_id) = crate::tests::with_single_file(&before);
let frange = FileRange { file_id, range: selection.into() }; let frange = FileRange { file_id, range: selection.into() };
let mut assist = Assist::resolved(&db, frange) let mut assist = Assist::resolved(&db, &AssistConfig::default(), frange)
.into_iter() .into_iter()
.find(|assist| assist.assist.id.0 == assist_id) .find(|assist| assist.assist.id.0 == assist_id)
.unwrap_or_else(|| { .unwrap_or_else(|| {
panic!( panic!(
"\n\nAssist is not applicable: {}\nAvailable assists: {}", "\n\nAssist is not applicable: {}\nAvailable assists: {}",
assist_id, assist_id,
Assist::resolved(&db, frange) Assist::resolved(&db, &AssistConfig::default(), frange)
.into_iter() .into_iter()
.map(|assist| assist.assist.id.0) .map(|assist| assist.assist.id.0)
.collect::<Vec<_>>() .collect::<Vec<_>>()
@ -90,7 +90,8 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult) {
let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() }; let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
let sema = Semantics::new(&db); let sema = Semantics::new(&db);
let ctx = AssistContext::new(sema, frange); let config = AssistConfig::default();
let ctx = AssistContext::new(sema, &config, frange);
let mut acc = Assists::new_resolved(&ctx); let mut acc = Assists::new_resolved(&ctx);
handler(&mut acc, &ctx); handler(&mut acc, &ctx);
let mut res = acc.finish_resolved(); let mut res = acc.finish_resolved();
@ -103,19 +104,20 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult) {
let mut actual = db.file_text(change.file_id).as_ref().to_owned(); let mut actual = db.file_text(change.file_id).as_ref().to_owned();
change.edit.apply(&mut actual); change.edit.apply(&mut actual);
match source_change.cursor_position { if !source_change.is_snippet {
None => { match source_change.cursor_position {
if let RangeOrOffset::Offset(before_cursor_pos) = range_or_offset { None => {
let off = change if let RangeOrOffset::Offset(before_cursor_pos) = range_or_offset {
.edit let off = change
.apply_to_offset(before_cursor_pos) .edit
.expect("cursor position is affected by the edit"); .apply_to_offset(before_cursor_pos)
actual = add_cursor(&actual, off) .expect("cursor position is affected by the edit");
actual = add_cursor(&actual, off)
}
} }
} Some(off) => actual = add_cursor(&actual, off.offset),
Some(off) => actual = add_cursor(&actual, off.offset), };
}; }
assert_eq_text!(after, &actual); assert_eq_text!(after, &actual);
} }
(Some(assist), ExpectedResult::Target(target)) => { (Some(assist), ExpectedResult::Target(target)) => {
@ -136,7 +138,7 @@ fn assist_order_field_struct() {
let (before_cursor_pos, before) = extract_offset(before); let (before_cursor_pos, before) = extract_offset(before);
let (db, file_id) = with_single_file(&before); let (db, file_id) = with_single_file(&before);
let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) }; let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) };
let assists = Assist::resolved(&db, frange); let assists = Assist::resolved(&db, &AssistConfig::default(), frange);
let mut assists = assists.iter(); let mut assists = assists.iter();
assert_eq!( assert_eq!(
@ -159,7 +161,7 @@ fn assist_order_if_expr() {
let (range, before) = extract_range(before); let (range, before) = extract_range(before);
let (db, file_id) = with_single_file(&before); let (db, file_id) = with_single_file(&before);
let frange = FileRange { file_id, range }; let frange = FileRange { file_id, range };
let assists = Assist::resolved(&db, frange); let assists = Assist::resolved(&db, &AssistConfig::default(), frange);
let mut assists = assists.iter(); let mut assists = assists.iter();
assert_eq!(assists.next().expect("expected assist").assist.label, "Extract into variable"); assert_eq!(assists.next().expect("expected assist").assist.label, "Extract into variable");

View file

@ -15,7 +15,7 @@ struct S;
struct S; struct S;
impl Debug for S { impl Debug for S {
$0
} }
"#####, "#####,
) )
@ -32,7 +32,7 @@ struct Point {
} }
"#####, "#####,
r#####" r#####"
#[derive()] #[derive($0)]
struct Point { struct Point {
x: u32, x: u32,
y: u32, y: u32,
@ -108,16 +108,16 @@ fn doctest_add_impl() {
"add_impl", "add_impl",
r#####" r#####"
struct Ctx<T: Clone> { struct Ctx<T: Clone> {
data: T,<|> data: T,<|>
} }
"#####, "#####,
r#####" r#####"
struct Ctx<T: Clone> { struct Ctx<T: Clone> {
data: T, data: T,
} }
impl<T: Clone> Ctx<T> { impl<T: Clone> Ctx<T> {
$0
} }
"#####, "#####,
) )

View file

@ -59,8 +59,8 @@ pub use crate::completion::{
/// with ordering of completions (currently this is done by the client). /// with ordering of completions (currently this is done by the client).
pub(crate) fn completions( pub(crate) fn completions(
db: &RootDatabase, db: &RootDatabase,
position: FilePosition,
config: &CompletionConfig, config: &CompletionConfig,
position: FilePosition,
) -> Option<Completions> { ) -> Option<Completions> {
let ctx = CompletionContext::new(db, position, config)?; let ctx = CompletionContext::new(db, position, config)?;

View file

@ -20,7 +20,7 @@ pub(crate) fn do_completion_with_options(
} else { } else {
single_file_with_position(code) single_file_with_position(code)
}; };
let completions = analysis.completions(position, options).unwrap().unwrap(); let completions = analysis.completions(options, position).unwrap().unwrap();
let completion_items: Vec<CompletionItem> = completions.into(); let completion_items: Vec<CompletionItem> = completions.into();
let mut kind_completions: Vec<CompletionItem> = let mut kind_completions: Vec<CompletionItem> =
completion_items.into_iter().filter(|c| c.completion_kind == kind).collect(); completion_items.into_iter().filter(|c| c.completion_kind == kind).collect();

View file

@ -629,6 +629,7 @@ mod tests {
}, },
], ],
cursor_position: None, cursor_position: None,
is_snippet: false,
}, },
), ),
severity: Error, severity: Error,
@ -685,6 +686,7 @@ mod tests {
], ],
file_system_edits: [], file_system_edits: [],
cursor_position: None, cursor_position: None,
is_snippet: false,
}, },
), ),
severity: Error, severity: Error,

View file

@ -82,7 +82,7 @@ pub use crate::{
}; };
pub use hir::Documentation; pub use hir::Documentation;
pub use ra_assists::AssistId; pub use ra_assists::{AssistConfig, AssistId};
pub use ra_db::{ pub use ra_db::{
Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRootId, Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRootId,
}; };
@ -458,17 +458,17 @@ impl Analysis {
/// Computes completions at the given position. /// Computes completions at the given position.
pub fn completions( pub fn completions(
&self, &self,
position: FilePosition,
config: &CompletionConfig, config: &CompletionConfig,
position: FilePosition,
) -> Cancelable<Option<Vec<CompletionItem>>> { ) -> Cancelable<Option<Vec<CompletionItem>>> {
self.with_db(|db| completion::completions(db, position, config).map(Into::into)) self.with_db(|db| completion::completions(db, config, position).map(Into::into))
} }
/// Computes assists (aka code actions aka intentions) for the given /// Computes assists (aka code actions aka intentions) for the given
/// position. /// position.
pub fn assists(&self, frange: FileRange) -> Cancelable<Vec<Assist>> { pub fn assists(&self, config: &AssistConfig, frange: FileRange) -> Cancelable<Vec<Assist>> {
self.with_db(|db| { self.with_db(|db| {
ra_assists::Assist::resolved(db, frange) ra_assists::Assist::resolved(db, config, frange)
.into_iter() .into_iter()
.map(|assist| Assist { .map(|assist| Assist {
id: assist.assist.id, id: assist.assist.id,

View file

@ -670,6 +670,7 @@ mod tests {
}, },
], ],
cursor_position: None, cursor_position: None,
is_snippet: false,
}, },
}, },
) )
@ -722,6 +723,7 @@ mod tests {
}, },
], ],
cursor_position: None, cursor_position: None,
is_snippet: false,
}, },
}, },
) )
@ -818,6 +820,7 @@ mod tests {
}, },
], ],
cursor_position: None, cursor_position: None,
is_snippet: false,
}, },
}, },
) )

View file

@ -13,6 +13,7 @@ pub struct SourceChange {
pub source_file_edits: Vec<SourceFileEdit>, pub source_file_edits: Vec<SourceFileEdit>,
pub file_system_edits: Vec<FileSystemEdit>, pub file_system_edits: Vec<FileSystemEdit>,
pub cursor_position: Option<FilePosition>, pub cursor_position: Option<FilePosition>,
pub is_snippet: bool,
} }
impl SourceChange { impl SourceChange {
@ -28,6 +29,7 @@ impl SourceChange {
source_file_edits, source_file_edits,
file_system_edits, file_system_edits,
cursor_position: None, cursor_position: None,
is_snippet: false,
} }
} }
@ -41,6 +43,7 @@ impl SourceChange {
source_file_edits: edits, source_file_edits: edits,
file_system_edits: vec![], file_system_edits: vec![],
cursor_position: None, cursor_position: None,
is_snippet: false,
} }
} }
@ -52,6 +55,7 @@ impl SourceChange {
source_file_edits: vec![], source_file_edits: vec![],
file_system_edits: edits, file_system_edits: edits,
cursor_position: None, cursor_position: None,
is_snippet: false,
} }
} }
@ -115,6 +119,7 @@ impl SingleFileChange {
source_file_edits: vec![SourceFileEdit { file_id, edit: self.edit }], source_file_edits: vec![SourceFileEdit { file_id, edit: self.edit }],
file_system_edits: Vec::new(), file_system_edits: Vec::new(),
cursor_position: self.cursor_position.map(|offset| FilePosition { file_id, offset }), cursor_position: self.cursor_position.map(|offset| FilePosition { file_id, offset }),
is_snippet: false,
} }
} }
} }

View file

@ -105,7 +105,7 @@ pub fn analysis_bench(
if is_completion { if is_completion {
let options = CompletionConfig::default(); let options = CompletionConfig::default();
let res = do_work(&mut host, file_id, |analysis| { let res = do_work(&mut host, file_id, |analysis| {
analysis.completions(file_position, &options) analysis.completions(&options, file_position)
}); });
if verbosity.is_verbose() { if verbosity.is_verbose() {
println!("\n{:#?}", res); println!("\n{:#?}", res);

View file

@ -11,7 +11,7 @@ use std::{ffi::OsString, path::PathBuf};
use lsp_types::ClientCapabilities; use lsp_types::ClientCapabilities;
use ra_flycheck::FlycheckConfig; use ra_flycheck::FlycheckConfig;
use ra_ide::{CompletionConfig, InlayHintsConfig}; use ra_ide::{AssistConfig, CompletionConfig, InlayHintsConfig};
use ra_project_model::CargoConfig; use ra_project_model::CargoConfig;
use serde::Deserialize; use serde::Deserialize;
@ -32,6 +32,7 @@ pub struct Config {
pub inlay_hints: InlayHintsConfig, pub inlay_hints: InlayHintsConfig,
pub completion: CompletionConfig, pub completion: CompletionConfig,
pub assist: AssistConfig,
pub call_info_full: bool, pub call_info_full: bool,
pub lens: LensConfig, pub lens: LensConfig,
} }
@ -136,6 +137,7 @@ impl Default for Config {
add_call_argument_snippets: true, add_call_argument_snippets: true,
..CompletionConfig::default() ..CompletionConfig::default()
}, },
assist: AssistConfig::default(),
call_info_full: true, call_info_full: true,
lens: LensConfig::default(), lens: LensConfig::default(),
} }
@ -273,6 +275,7 @@ impl Config {
{ {
self.client_caps.code_action_literals = value; self.client_caps.code_action_literals = value;
} }
self.completion.allow_snippets(false); self.completion.allow_snippets(false);
if let Some(completion) = &doc_caps.completion { if let Some(completion) = &doc_caps.completion {
if let Some(completion_item) = &completion.completion_item { if let Some(completion_item) = &completion.completion_item {
@ -288,5 +291,12 @@ impl Config {
self.client_caps.work_done_progress = value; self.client_caps.work_done_progress = value;
} }
} }
self.assist.allow_snippets(false);
if let Some(experimental) = &caps.experimental {
let enable =
experimental.get("snippetTextEdit").and_then(|it| it.as_bool()) == Some(true);
self.assist.allow_snippets(enable);
}
} }
} }

View file

@ -3,9 +3,11 @@ pub(crate) mod to_proto;
use std::{collections::HashMap, sync::Arc}; use std::{collections::HashMap, sync::Arc};
use lsp_types::{CodeActionOrCommand, Diagnostic, Range}; use lsp_types::{Diagnostic, Range};
use ra_ide::FileId; use ra_ide::FileId;
use crate::lsp_ext;
pub type CheckFixes = Arc<HashMap<FileId, Vec<Fix>>>; pub type CheckFixes = Arc<HashMap<FileId, Vec<Fix>>>;
#[derive(Debug, Default, Clone)] #[derive(Debug, Default, Clone)]
@ -18,13 +20,13 @@ pub struct DiagnosticCollection {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Fix { pub struct Fix {
pub range: Range, pub range: Range,
pub action: CodeActionOrCommand, pub action: lsp_ext::CodeAction,
} }
#[derive(Debug)] #[derive(Debug)]
pub enum DiagnosticTask { pub enum DiagnosticTask {
ClearCheck, ClearCheck,
AddCheck(FileId, Diagnostic, Vec<CodeActionOrCommand>), AddCheck(FileId, Diagnostic, Vec<lsp_ext::CodeAction>),
SetNative(FileId, Vec<Diagnostic>), SetNative(FileId, Vec<Diagnostic>),
} }
@ -38,7 +40,7 @@ impl DiagnosticCollection {
&mut self, &mut self,
file_id: FileId, file_id: FileId,
diagnostic: Diagnostic, diagnostic: Diagnostic,
fixes: Vec<CodeActionOrCommand>, fixes: Vec<lsp_ext::CodeAction>,
) { ) {
let diagnostics = self.check.entry(file_id).or_default(); let diagnostics = self.check.entry(file_id).or_default();
for existing_diagnostic in diagnostics.iter() { for existing_diagnostic in diagnostics.iter() {

View file

@ -68,9 +68,9 @@ expression: diag
kind: Some( kind: Some(
"quickfix", "quickfix",
), ),
diagnostics: None, command: None,
edit: Some( edit: Some(
WorkspaceEdit { SnippetWorkspaceEdit {
changes: Some( changes: Some(
{ {
"file:///test/src/main.rs": [ "file:///test/src/main.rs": [
@ -106,8 +106,6 @@ expression: diag
document_changes: None, document_changes: None,
}, },
), ),
command: None,
is_preferred: None,
}, },
], ],
}, },

View file

@ -53,9 +53,9 @@ expression: diag
kind: Some( kind: Some(
"quickfix", "quickfix",
), ),
diagnostics: None, command: None,
edit: Some( edit: Some(
WorkspaceEdit { SnippetWorkspaceEdit {
changes: Some( changes: Some(
{ {
"file:///test/driver/subcommand/repl.rs": [ "file:///test/driver/subcommand/repl.rs": [
@ -78,8 +78,6 @@ expression: diag
document_changes: None, document_changes: None,
}, },
), ),
command: None,
is_preferred: None,
}, },
], ],
}, },

View file

@ -7,13 +7,13 @@ use std::{
}; };
use lsp_types::{ use lsp_types::{
CodeAction, Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, Location,
Location, NumberOrString, Position, Range, TextEdit, Url, WorkspaceEdit, NumberOrString, Position, Range, TextEdit, Url,
}; };
use ra_flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion}; use ra_flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion};
use stdx::format_to; use stdx::format_to;
use crate::Result; use crate::{lsp_ext, Result};
/// Converts a Rust level string to a LSP severity /// Converts a Rust level string to a LSP severity
fn map_level_to_severity(val: DiagnosticLevel) -> Option<DiagnosticSeverity> { fn map_level_to_severity(val: DiagnosticLevel) -> Option<DiagnosticSeverity> {
@ -110,7 +110,7 @@ fn is_deprecated(rd: &ra_flycheck::Diagnostic) -> bool {
enum MappedRustChildDiagnostic { enum MappedRustChildDiagnostic {
Related(DiagnosticRelatedInformation), Related(DiagnosticRelatedInformation),
SuggestedFix(CodeAction), SuggestedFix(lsp_ext::CodeAction),
MessageLine(String), MessageLine(String),
} }
@ -143,13 +143,15 @@ fn map_rust_child_diagnostic(
message: rd.message.clone(), message: rd.message.clone(),
}) })
} else { } else {
MappedRustChildDiagnostic::SuggestedFix(CodeAction { MappedRustChildDiagnostic::SuggestedFix(lsp_ext::CodeAction {
title: rd.message.clone(), title: rd.message.clone(),
kind: Some("quickfix".to_string()), kind: Some("quickfix".to_string()),
diagnostics: None, edit: Some(lsp_ext::SnippetWorkspaceEdit {
edit: Some(WorkspaceEdit::new(edit_map)), // FIXME: there's no good reason to use edit_map here....
changes: Some(edit_map),
document_changes: None,
}),
command: None, command: None,
is_preferred: None,
}) })
} }
} }
@ -158,7 +160,7 @@ fn map_rust_child_diagnostic(
pub(crate) struct MappedRustDiagnostic { pub(crate) struct MappedRustDiagnostic {
pub location: Location, pub location: Location,
pub diagnostic: Diagnostic, pub diagnostic: Diagnostic,
pub fixes: Vec<CodeAction>, pub fixes: Vec<lsp_ext::CodeAction>,
} }
/// Converts a Rust root diagnostic to LSP form /// Converts a Rust root diagnostic to LSP form

View file

@ -1,6 +1,6 @@
//! rust-analyzer extensions to the LSP. //! rust-analyzer extensions to the LSP.
use std::path::PathBuf; use std::{collections::HashMap, path::PathBuf};
use lsp_types::request::Request; use lsp_types::request::Request;
use lsp_types::{Location, Position, Range, TextDocumentIdentifier}; use lsp_types::{Location, Position, Range, TextDocumentIdentifier};
@ -137,7 +137,7 @@ pub struct Runnable {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct SourceChange { pub struct SourceChange {
pub label: String, pub label: String,
pub workspace_edit: lsp_types::WorkspaceEdit, pub workspace_edit: SnippetWorkspaceEdit,
pub cursor_position: Option<lsp_types::TextDocumentPositionParams>, pub cursor_position: Option<lsp_types::TextDocumentPositionParams>,
} }
@ -183,3 +183,54 @@ pub struct SsrParams {
pub query: String, pub query: String,
pub parse_only: bool, pub parse_only: bool,
} }
pub enum CodeActionRequest {}
impl Request for CodeActionRequest {
type Params = lsp_types::CodeActionParams;
type Result = Option<Vec<CodeAction>>;
const METHOD: &'static str = "textDocument/codeAction";
}
#[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)]
pub struct CodeAction {
pub title: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub kind: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub command: Option<lsp_types::Command>,
#[serde(skip_serializing_if = "Option::is_none")]
pub edit: Option<SnippetWorkspaceEdit>,
}
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SnippetWorkspaceEdit {
#[serde(skip_serializing_if = "Option::is_none")]
pub changes: Option<HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub document_changes: Option<Vec<SnippetDocumentChangeOperation>>,
}
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(untagged, rename_all = "lowercase")]
pub enum SnippetDocumentChangeOperation {
Op(lsp_types::ResourceOp),
Edit(SnippetTextDocumentEdit),
}
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SnippetTextDocumentEdit {
pub text_document: lsp_types::VersionedTextDocumentIdentifier,
pub edits: Vec<SnippetTextEdit>,
}
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SnippetTextEdit {
pub range: Range,
pub new_text: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub insert_text_format: Option<lsp_types::InsertTextFormat>,
}

View file

@ -518,6 +518,7 @@ fn on_request(
.on::<lsp_ext::ParentModule>(handlers::handle_parent_module)? .on::<lsp_ext::ParentModule>(handlers::handle_parent_module)?
.on::<lsp_ext::Runnables>(handlers::handle_runnables)? .on::<lsp_ext::Runnables>(handlers::handle_runnables)?
.on::<lsp_ext::InlayHints>(handlers::handle_inlay_hints)? .on::<lsp_ext::InlayHints>(handlers::handle_inlay_hints)?
.on::<lsp_ext::CodeActionRequest>(handlers::handle_code_action)?
.on::<lsp_types::request::OnTypeFormatting>(handlers::handle_on_type_formatting)? .on::<lsp_types::request::OnTypeFormatting>(handlers::handle_on_type_formatting)?
.on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)? .on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)?
.on::<lsp_types::request::WorkspaceSymbol>(handlers::handle_workspace_symbol)? .on::<lsp_types::request::WorkspaceSymbol>(handlers::handle_workspace_symbol)?
@ -525,7 +526,6 @@ fn on_request(
.on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)? .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)?
.on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)? .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)?
.on::<lsp_types::request::Completion>(handlers::handle_completion)? .on::<lsp_types::request::Completion>(handlers::handle_completion)?
.on::<lsp_types::request::CodeActionRequest>(handlers::handle_code_action)?
.on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)? .on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)?
.on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)? .on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)?
.on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)? .on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)?

View file

@ -11,12 +11,11 @@ use lsp_server::ErrorCode;
use lsp_types::{ use lsp_types::{
CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem, CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem,
CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams, CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
CodeAction, CodeActionResponse, CodeLens, Command, CompletionItem, Diagnostic, CodeLens, Command, CompletionItem, Diagnostic, DocumentFormattingParams, DocumentHighlight,
DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange, FoldingRangeParams, DocumentSymbol, FoldingRange, FoldingRangeParams, Hover, HoverContents, Location,
Hover, HoverContents, Location, MarkupContent, MarkupKind, Position, PrepareRenameResponse, MarkupContent, MarkupKind, Position, PrepareRenameResponse, Range, RenameParams,
Range, RenameParams, SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, TextDocumentIdentifier, SemanticTokensResult, SymbolInformation, TextDocumentIdentifier, TextEdit, Url, WorkspaceEdit,
TextEdit, Url, WorkspaceEdit,
}; };
use ra_ide::{ use ra_ide::{
Assist, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind, SearchScope, Assist, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind, SearchScope,
@ -476,7 +475,7 @@ pub fn handle_completion(
return Ok(None); return Ok(None);
} }
let items = match world.analysis().completions(position, &world.config.completion)? { let items = match world.analysis().completions(&world.config.completion, position)? {
None => return Ok(None), None => return Ok(None),
Some(items) => items, Some(items) => items,
}; };
@ -585,9 +584,8 @@ pub fn handle_rename(world: WorldSnapshot, params: RenameParams) -> Result<Optio
None => return Ok(None), None => return Ok(None),
Some(it) => it.info, Some(it) => it.info,
}; };
let workspace_edit = to_proto::workspace_edit(&world, source_change)?;
let source_change = to_proto::source_change(&world, source_change)?; Ok(Some(workspace_edit))
Ok(Some(source_change.workspace_edit))
} }
pub fn handle_references( pub fn handle_references(
@ -696,14 +694,21 @@ pub fn handle_formatting(
pub fn handle_code_action( pub fn handle_code_action(
world: WorldSnapshot, world: WorldSnapshot,
params: lsp_types::CodeActionParams, params: lsp_types::CodeActionParams,
) -> Result<Option<CodeActionResponse>> { ) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
let _p = profile("handle_code_action"); let _p = profile("handle_code_action");
// We intentionally don't support command-based actions, as those either
// requires custom client-code anyway, or requires server-initiated edits.
// Server initiated edits break causality, so we avoid those as well.
if !world.config.client_caps.code_action_literals {
return Ok(None);
}
let file_id = from_proto::file_id(&world, &params.text_document.uri)?; let file_id = from_proto::file_id(&world, &params.text_document.uri)?;
let line_index = world.analysis().file_line_index(file_id)?; let line_index = world.analysis().file_line_index(file_id)?;
let range = from_proto::text_range(&line_index, params.range); let range = from_proto::text_range(&line_index, params.range);
let diagnostics = world.analysis().diagnostics(file_id)?; let diagnostics = world.analysis().diagnostics(file_id)?;
let mut res = CodeActionResponse::default(); let mut res: Vec<lsp_ext::CodeAction> = Vec::new();
let fixes_from_diagnostics = diagnostics let fixes_from_diagnostics = diagnostics
.into_iter() .into_iter()
@ -713,22 +718,9 @@ pub fn handle_code_action(
for source_edit in fixes_from_diagnostics { for source_edit in fixes_from_diagnostics {
let title = source_edit.label.clone(); let title = source_edit.label.clone();
let edit = to_proto::source_change(&world, source_edit)?; let edit = to_proto::snippet_workspace_edit(&world, source_edit)?;
let action = lsp_ext::CodeAction { title, kind: None, edit: Some(edit), command: None };
let command = Command { res.push(action);
title,
command: "rust-analyzer.applySourceChange".to_string(),
arguments: Some(vec![to_value(edit).unwrap()]),
};
let action = CodeAction {
title: command.title.clone(),
kind: None,
diagnostics: None,
edit: None,
command: Some(command),
is_preferred: None,
};
res.push(action.into());
} }
for fix in world.check_fixes.get(&file_id).into_iter().flatten() { for fix in world.check_fixes.get(&file_id).into_iter().flatten() {
@ -740,14 +732,21 @@ pub fn handle_code_action(
} }
let mut grouped_assists: FxHashMap<String, (usize, Vec<Assist>)> = FxHashMap::default(); let mut grouped_assists: FxHashMap<String, (usize, Vec<Assist>)> = FxHashMap::default();
for assist in world.analysis().assists(FileRange { file_id, range })?.into_iter() { for assist in
world.analysis().assists(&world.config.assist, FileRange { file_id, range })?.into_iter()
{
match &assist.group_label { match &assist.group_label {
Some(label) => grouped_assists Some(label) => grouped_assists
.entry(label.to_owned()) .entry(label.to_owned())
.or_insert_with(|| { .or_insert_with(|| {
let idx = res.len(); let idx = res.len();
let dummy = Command::new(String::new(), String::new(), None); let dummy = lsp_ext::CodeAction {
res.push(dummy.into()); title: String::new(),
kind: None,
command: None,
edit: None,
};
res.push(dummy);
(idx, Vec::new()) (idx, Vec::new())
}) })
.1 .1
@ -775,35 +774,10 @@ pub fn handle_code_action(
command: "rust-analyzer.selectAndApplySourceChange".to_string(), command: "rust-analyzer.selectAndApplySourceChange".to_string(),
arguments: Some(vec![serde_json::Value::Array(arguments)]), arguments: Some(vec![serde_json::Value::Array(arguments)]),
}); });
res[idx] = CodeAction { res[idx] = lsp_ext::CodeAction { title, kind: None, edit: None, command };
title,
kind: None,
diagnostics: None,
edit: None,
command,
is_preferred: None,
}
.into();
} }
} }
// If the client only supports commands then filter the list
// and remove and actions that depend on edits.
if !world.config.client_caps.code_action_literals {
// FIXME: use drain_filter once it hits stable.
res = res
.into_iter()
.filter_map(|it| match it {
cmd @ lsp_types::CodeActionOrCommand::Command(_) => Some(cmd),
lsp_types::CodeActionOrCommand::CodeAction(action) => match action.command {
Some(cmd) if action.edit.is_none() => {
Some(lsp_types::CodeActionOrCommand::Command(cmd))
}
_ => None,
},
})
.collect();
}
Ok(Some(res)) Ok(Some(res))
} }

View file

@ -112,6 +112,22 @@ pub(crate) fn text_edit(
lsp_types::TextEdit { range, new_text } lsp_types::TextEdit { range, new_text }
} }
pub(crate) fn snippet_text_edit(
line_index: &LineIndex,
line_endings: LineEndings,
is_snippet: bool,
indel: Indel,
) -> lsp_ext::SnippetTextEdit {
let text_edit = text_edit(line_index, line_endings, indel);
let insert_text_format =
if is_snippet { Some(lsp_types::InsertTextFormat::Snippet) } else { None };
lsp_ext::SnippetTextEdit {
range: text_edit.range,
new_text: text_edit.new_text,
insert_text_format,
}
}
pub(crate) fn text_edit_vec( pub(crate) fn text_edit_vec(
line_index: &LineIndex, line_index: &LineIndex,
line_endings: LineEndings, line_endings: LineEndings,
@ -441,10 +457,11 @@ pub(crate) fn goto_definition_response(
} }
} }
pub(crate) fn text_document_edit( pub(crate) fn snippet_text_document_edit(
world: &WorldSnapshot, world: &WorldSnapshot,
is_snippet: bool,
source_file_edit: SourceFileEdit, source_file_edit: SourceFileEdit,
) -> Result<lsp_types::TextDocumentEdit> { ) -> Result<lsp_ext::SnippetTextDocumentEdit> {
let text_document = versioned_text_document_identifier(world, source_file_edit.file_id, None)?; let text_document = versioned_text_document_identifier(world, source_file_edit.file_id, None)?;
let line_index = world.analysis().file_line_index(source_file_edit.file_id)?; let line_index = world.analysis().file_line_index(source_file_edit.file_id)?;
let line_endings = world.file_line_endings(source_file_edit.file_id); let line_endings = world.file_line_endings(source_file_edit.file_id);
@ -452,9 +469,9 @@ pub(crate) fn text_document_edit(
.edit .edit
.as_indels() .as_indels()
.iter() .iter()
.map(|it| text_edit(&line_index, line_endings, it.clone())) .map(|it| snippet_text_edit(&line_index, line_endings, is_snippet, it.clone()))
.collect(); .collect();
Ok(lsp_types::TextDocumentEdit { text_document, edits }) Ok(lsp_ext::SnippetTextDocumentEdit { text_document, edits })
} }
pub(crate) fn resource_op( pub(crate) fn resource_op(
@ -500,20 +517,70 @@ pub(crate) fn source_change(
}) })
} }
}; };
let mut document_changes: Vec<lsp_types::DocumentChangeOperation> = Vec::new(); let label = source_change.label.clone();
let workspace_edit = self::snippet_workspace_edit(world, source_change)?;
Ok(lsp_ext::SourceChange { label, workspace_edit, cursor_position })
}
pub(crate) fn snippet_workspace_edit(
world: &WorldSnapshot,
source_change: SourceChange,
) -> Result<lsp_ext::SnippetWorkspaceEdit> {
let mut document_changes: Vec<lsp_ext::SnippetDocumentChangeOperation> = Vec::new();
for op in source_change.file_system_edits { for op in source_change.file_system_edits {
let op = resource_op(&world, op)?; let op = resource_op(&world, op)?;
document_changes.push(lsp_types::DocumentChangeOperation::Op(op)); document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Op(op));
} }
for edit in source_change.source_file_edits { for edit in source_change.source_file_edits {
let edit = text_document_edit(&world, edit)?; let edit = snippet_text_document_edit(&world, source_change.is_snippet, edit)?;
document_changes.push(lsp_types::DocumentChangeOperation::Edit(edit)); document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
}
let workspace_edit =
lsp_ext::SnippetWorkspaceEdit { changes: None, document_changes: Some(document_changes) };
Ok(workspace_edit)
}
pub(crate) fn workspace_edit(
world: &WorldSnapshot,
source_change: SourceChange,
) -> Result<lsp_types::WorkspaceEdit> {
assert!(!source_change.is_snippet);
snippet_workspace_edit(world, source_change).map(|it| it.into())
}
impl From<lsp_ext::SnippetWorkspaceEdit> for lsp_types::WorkspaceEdit {
fn from(snippet_workspace_edit: lsp_ext::SnippetWorkspaceEdit) -> lsp_types::WorkspaceEdit {
lsp_types::WorkspaceEdit {
changes: None,
document_changes: snippet_workspace_edit.document_changes.map(|changes| {
lsp_types::DocumentChanges::Operations(
changes
.into_iter()
.map(|change| match change {
lsp_ext::SnippetDocumentChangeOperation::Op(op) => {
lsp_types::DocumentChangeOperation::Op(op)
}
lsp_ext::SnippetDocumentChangeOperation::Edit(edit) => {
lsp_types::DocumentChangeOperation::Edit(
lsp_types::TextDocumentEdit {
text_document: edit.text_document,
edits: edit
.edits
.into_iter()
.map(|edit| lsp_types::TextEdit {
range: edit.range,
new_text: edit.new_text,
})
.collect(),
},
)
}
})
.collect(),
)
}),
}
} }
let workspace_edit = lsp_types::WorkspaceEdit {
changes: None,
document_changes: Some(lsp_types::DocumentChanges::Operations(document_changes)),
};
Ok(lsp_ext::SourceChange { label: source_change.label, workspace_edit, cursor_position })
} }
pub fn call_hierarchy_item( pub fn call_hierarchy_item(
@ -571,22 +638,25 @@ fn main() <fold>{
} }
} }
pub(crate) fn code_action(world: &WorldSnapshot, assist: Assist) -> Result<lsp_types::CodeAction> { pub(crate) fn code_action(world: &WorldSnapshot, assist: Assist) -> Result<lsp_ext::CodeAction> {
let source_change = source_change(&world, assist.source_change)?; let res = if assist.source_change.is_snippet {
let arg = serde_json::to_value(source_change)?; lsp_ext::CodeAction {
let title = assist.label; title: assist.label,
let command = lsp_types::Command { kind: Some(String::new()),
title: title.clone(), edit: Some(snippet_workspace_edit(world, assist.source_change)?),
command: "rust-analyzer.applySourceChange".to_string(), command: None,
arguments: Some(vec![arg]), }
}; } else {
let source_change = source_change(&world, assist.source_change)?;
let arg = serde_json::to_value(source_change)?;
let title = assist.label;
let command = lsp_types::Command {
title: title.clone(),
command: "rust-analyzer.applySourceChange".to_string(),
arguments: Some(vec![arg]),
};
Ok(lsp_types::CodeAction { lsp_ext::CodeAction { title, kind: Some(String::new()), edit: None, command: Some(command) }
title, };
kind: Some(String::new()), Ok(res)
diagnostics: None,
edit: None,
command: Some(command),
is_preferred: None,
})
} }

View file

@ -333,29 +333,17 @@ fn main() {}
partial_result_params: PartialResultParams::default(), partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(), work_done_progress_params: WorkDoneProgressParams::default(),
}, },
json!([ json!([{
{ "edit": {
"command": { "documentChanges": [
"arguments": [
{ {
"cursorPosition": null, "kind": "create",
"label": "Create module", "uri": "file:///[..]/src/bar.rs"
"workspaceEdit": {
"documentChanges": [
{
"kind": "create",
"uri": "file:///[..]/src/bar.rs"
}
]
}
} }
], ]
"command": "rust-analyzer.applySourceChange",
"title": "Create module"
}, },
"title": "Create module" "title": "Create module"
} }]),
]),
); );
server.request::<CodeActionRequest>( server.request::<CodeActionRequest>(
@ -416,29 +404,17 @@ fn main() {{}}
partial_result_params: PartialResultParams::default(), partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(), work_done_progress_params: WorkDoneProgressParams::default(),
}, },
json!([ json!([{
{ "edit": {
"command": { "documentChanges": [
"arguments": [
{ {
"cursorPosition": null, "kind": "create",
"label": "Create module", "uri": "file://[..]/src/bar.rs"
"workspaceEdit": {
"documentChanges": [
{
"kind": "create",
"uri": "file:///[..]/src/bar.rs"
}
]
}
} }
], ]
"command": "rust-analyzer.applySourceChange",
"title": "Create module"
}, },
"title": "Create module" "title": "Create module"
} }]),
]),
); );
server.request::<CodeActionRequest>( server.request::<CodeActionRequest>(

View file

@ -0,0 +1,34 @@
# LSP Extensions
This document describes LSP extensions used by rust-analyzer.
It's a best effort document, when in doubt, consult the source (and send a PR with clarification ;-) ).
We aim to upstream all non Rust-specific extensions to the protocol, but this is not a top priority.
All capabilities are enabled via `experimental` field of `ClientCapabilities`.
## `SnippetTextEdit`
**Capability**
```typescript
{
"snippetTextEdit": boolean
}
```
If this capability is set, `WorkspaceEdit`s returned from `codeAction` requests might contain `SnippetTextEdit`s instead of usual `TextEdit`s:
```typescript
interface SnippetTextEdit extends TextEdit {
insertTextFormat?: InsertTextFormat;
}
```
```typescript
export interface TextDocumentEdit {
textDocument: VersionedTextDocumentIdentifier;
edits: (TextEdit | SnippetTextEdit)[];
}
```
When applying such code action, the editor should insert snippet, with tab stops and placeholder.
At the moment, rust-analyzer guarantees that only a single edit will have `InsertTextFormat.Snippet`.

View file

@ -17,7 +17,7 @@ struct S;
struct S; struct S;
impl Debug for S { impl Debug for S {
$0
} }
``` ```
@ -33,7 +33,7 @@ struct Point {
} }
// AFTER // AFTER
#[derive()] #[derive($0)]
struct Point { struct Point {
x: u32, x: u32,
y: u32, y: u32,
@ -105,16 +105,16 @@ Adds a new inherent impl for a type.
```rust ```rust
// BEFORE // BEFORE
struct Ctx<T: Clone> { struct Ctx<T: Clone> {
data: T,┃ data: T,┃
} }
// AFTER // AFTER
struct Ctx<T: Clone> { struct Ctx<T: Clone> {
data: T, data: T,
} }
impl<T: Clone> Ctx<T> { impl<T: Clone> Ctx<T> {
$0
} }
``` ```

View file

@ -31,24 +31,79 @@ export function createClient(serverPath: string, cwd: string): lc.LanguageClient
const res = await next(document, token); const res = await next(document, token);
if (res === undefined) throw new Error('busy'); if (res === undefined) throw new Error('busy');
return res; return res;
},
async provideCodeActions(document: vscode.TextDocument, range: vscode.Range, context: vscode.CodeActionContext, token: vscode.CancellationToken, _next: lc.ProvideCodeActionsSignature) {
const params: lc.CodeActionParams = {
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(document),
range: client.code2ProtocolConverter.asRange(range),
context: client.code2ProtocolConverter.asCodeActionContext(context)
};
return client.sendRequest(lc.CodeActionRequest.type, params, token).then((values) => {
if (values === null) return undefined;
const result: (vscode.CodeAction | vscode.Command)[] = [];
for (const item of values) {
if (lc.CodeAction.is(item)) {
const action = client.protocol2CodeConverter.asCodeAction(item);
if (isSnippetEdit(item)) {
action.command = {
command: "rust-analyzer.applySnippetWorkspaceEdit",
title: "",
arguments: [action.edit],
};
action.edit = undefined;
}
result.push(action);
} else {
const command = client.protocol2CodeConverter.asCommand(item);
result.push(command);
}
}
return result;
},
(_error) => undefined
);
} }
} as any } as any
}; };
const res = new lc.LanguageClient( const client = new lc.LanguageClient(
'rust-analyzer', 'rust-analyzer',
'Rust Analyzer Language Server', 'Rust Analyzer Language Server',
serverOptions, serverOptions,
clientOptions, clientOptions,
); );
// To turn on all proposed features use: res.registerProposedFeatures(); // To turn on all proposed features use: client.registerProposedFeatures();
// Here we want to enable CallHierarchyFeature and SemanticTokensFeature // Here we want to enable CallHierarchyFeature and SemanticTokensFeature
// since they are available on stable. // since they are available on stable.
// Note that while these features are stable in vscode their LSP protocol // Note that while these features are stable in vscode their LSP protocol
// implementations are still in the "proposed" category for 3.16. // implementations are still in the "proposed" category for 3.16.
res.registerFeature(new CallHierarchyFeature(res)); client.registerFeature(new CallHierarchyFeature(client));
res.registerFeature(new SemanticTokensFeature(res)); client.registerFeature(new SemanticTokensFeature(client));
client.registerFeature(new SnippetTextEditFeature());
return res; return client;
}
class SnippetTextEditFeature implements lc.StaticFeature {
fillClientCapabilities(capabilities: lc.ClientCapabilities): void {
const caps: any = capabilities.experimental ?? {};
caps.snippetTextEdit = true;
capabilities.experimental = caps;
}
initialize(_capabilities: lc.ServerCapabilities<any>, _documentSelector: lc.DocumentSelector | undefined): void {
}
}
function isSnippetEdit(action: lc.CodeAction): boolean {
const documentChanges = action.edit?.documentChanges ?? [];
for (const edit of documentChanges) {
if (lc.TextDocumentEdit.is(edit)) {
if (edit.edits.some((indel) => (indel as any).insertTextFormat === lc.InsertTextFormat.Snippet)) {
return true;
}
}
}
return false;
} }

View file

@ -4,6 +4,7 @@ import * as ra from '../rust-analyzer-api';
import { Ctx, Cmd } from '../ctx'; import { Ctx, Cmd } from '../ctx';
import * as sourceChange from '../source_change'; import * as sourceChange from '../source_change';
import { assert } from '../util';
export * from './analyzer_status'; export * from './analyzer_status';
export * from './matching_brace'; export * from './matching_brace';
@ -51,3 +52,36 @@ export function selectAndApplySourceChange(ctx: Ctx): Cmd {
} }
}; };
} }
export function applySnippetWorkspaceEdit(_ctx: Ctx): Cmd {
return async (edit: vscode.WorkspaceEdit) => {
assert(edit.entries().length === 1, `bad ws edit: ${JSON.stringify(edit)}`);
const [uri, edits] = edit.entries()[0];
const editor = vscode.window.visibleTextEditors.find((it) => it.document.uri.toString() === uri.toString());
if (!editor) return;
let editWithSnippet: vscode.TextEdit | undefined = undefined;
let lineDelta = 0;
await editor.edit((builder) => {
for (const indel of edits) {
if (indel.newText.indexOf('$0') !== -1) {
editWithSnippet = indel;
} else {
if (!editWithSnippet) {
lineDelta = (indel.newText.match(/\n/g) || []).length - (indel.range.end.line - indel.range.start.line);
}
builder.replace(indel.range, indel.newText);
}
}
});
if (editWithSnippet) {
const snip = editWithSnippet as vscode.TextEdit;
const range = snip.range.with(
snip.range.start.with(snip.range.start.line + lineDelta),
snip.range.end.with(snip.range.end.line + lineDelta),
);
await editor.insertSnippet(new vscode.SnippetString(snip.newText), range);
}
};
}

View file

@ -91,6 +91,7 @@ export async function activate(context: vscode.ExtensionContext) {
ctx.registerCommand('debugSingle', commands.debugSingle); ctx.registerCommand('debugSingle', commands.debugSingle);
ctx.registerCommand('showReferences', commands.showReferences); ctx.registerCommand('showReferences', commands.showReferences);
ctx.registerCommand('applySourceChange', commands.applySourceChange); ctx.registerCommand('applySourceChange', commands.applySourceChange);
ctx.registerCommand('applySnippetWorkspaceEdit', commands.applySnippetWorkspaceEdit);
ctx.registerCommand('selectAndApplySourceChange', commands.selectAndApplySourceChange); ctx.registerCommand('selectAndApplySourceChange', commands.selectAndApplySourceChange);
ctx.pushCleanup(activateTaskProvider(workspaceFolder)); ctx.pushCleanup(activateTaskProvider(workspaceFolder));