10458: feat: Implement custom user snippets r=Veykril a=Veykril

![Y24dX7fOWX](https://user-images.githubusercontent.com/3757771/136059454-ceccfc2c-2c90-46da-8ad1-bac9c2e83ec1.gif)

Allows us to address the following issues:
- `.arc / .rc / .pin, similar to .box?` https://github.com/rust-analyzer/rust-analyzer/issues/7033
- `Add unsafe snippet` https://github.com/rust-analyzer/rust-analyzer/issues/10392, would allow users to have this without the diagnostic
- `.ok() postfix snippet is annoying` https://github.com/rust-analyzer/rust-analyzer/issues/9636, lets us get rid of the `ok` postfix and similar ones
- `Postfix vec completion` https://github.com/rust-analyzer/rust-analyzer/issues/7773

cc https://github.com/rust-analyzer/rust-analyzer/issues/772

Zulip discussion: https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer/topic/Custom.20Postfix.20snippets

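For a concrete picture of what this enables, here is a sketch of a postfix snippet defined in user settings, using the configuration format added in this PR (the `Arc::new` wrapper is only an illustration, not a built-in default):

```json
{
    "rust-analyzer.completion.snippets": {
        "Arc::new": {
            "postfix": "arc",
            "body": "Arc::new(${receiver})",
            "requires": "std::sync::Arc",
            "description": "Put the expression into an `Arc`",
            "scope": "expr"
        }
    }
}
```

With this in place, completing `expr.arc` expands to `Arc::new(expr)` and inserts `use std::sync::Arc;` if the path resolves and is not already in scope.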
Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
Committed by bors[bot] via GitHub on 2021-10-05 16:41:11 +00:00 in commit c409cf0e26.
23 changed files with 550 additions and 88 deletions

Cargo.lock

@@ -633,6 +633,7 @@ dependencies = [
 "once_cell",
 "profile",
 "rustc-hash",
+"smallvec",
 "sourcegen",
 "stdx",
 "syntax",

@@ -98,7 +98,8 @@ pub use ide_assists::{
    Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve,
};
pub use ide_completion::{
-   CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance, ImportEdit,
+   CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance, ImportEdit, Snippet,
+   SnippetScope,
};
pub use ide_db::{
    base_db::{

@@ -532,19 +533,10 @@ impl Analysis {
        &self,
        config: &CompletionConfig,
        position: FilePosition,
-       full_import_path: &str,
-       imported_name: String,
+       imports: impl IntoIterator<Item = (String, String)> + std::panic::UnwindSafe,
    ) -> Cancellable<Vec<TextEdit>> {
        Ok(self
-           .with_db(|db| {
-               ide_completion::resolve_completion_edits(
-                   db,
-                   config,
-                   position,
-                   full_import_path,
-                   imported_name,
-               )
-           })?
+           .with_db(|db| ide_completion::resolve_completion_edits(db, config, position, imports))?
            .unwrap_or_default())
    }

@@ -14,6 +14,7 @@ itertools = "0.10.0"
rustc-hash = "1.1.0"
either = "1.6.1"
once_cell = "1.7"
+smallvec = "1.4"
stdx = { path = "../stdx", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }

@@ -2,8 +2,9 @@
mod format_like;

+use hir::Documentation;
use ide_db::{
-   helpers::{FamousDefs, SnippetCap},
+   helpers::{insert_use::ImportScope, FamousDefs, SnippetCap},
    ty_filter::TryEnum,
};
use syntax::{

@@ -56,6 +57,10 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
    let postfix_snippet = build_postfix_snippet_builder(ctx, cap, &dot_receiver);

+   if !ctx.config.snippets.is_empty() {
+       add_custom_postfix_completions(acc, ctx, &postfix_snippet, &receiver_text);
+   }
+
    let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references());
    if let Some(try_enum) = &try_enum {
        match try_enum {

@@ -218,13 +223,40 @@ fn build_postfix_snippet_builder<'a>(
    }
}
fn add_custom_postfix_completions(
acc: &mut Completions,
ctx: &CompletionContext,
postfix_snippet: impl Fn(&str, &str, &str) -> Builder,
receiver_text: &str,
) -> Option<()> {
let import_scope =
ImportScope::find_insert_use_container_with_macros(&ctx.token.parent()?, &ctx.sema)?;
ctx.config.postfix_snippets().filter(|(_, snip)| snip.is_expr()).for_each(
|(trigger, snippet)| {
let imports = match snippet.imports(ctx, &import_scope) {
Some(imports) => imports,
None => return,
};
let body = snippet.postfix_snippet(&receiver_text);
let mut builder =
postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body);
builder.documentation(Documentation::new(format!("```rust\n{}\n```", body)));
for import in imports.into_iter() {
builder.add_import(import);
}
builder.add_to(acc);
},
);
None
}

#[cfg(test)]
mod tests {
    use expect_test::{expect, Expect};

    use crate::{
-       tests::{check_edit, filtered_completion_list},
-       CompletionKind,
+       tests::{check_edit, check_edit_with_config, filtered_completion_list, TEST_CONFIG},
+       CompletionConfig, CompletionKind, Snippet,
    };

    fn check(ra_fixture: &str, expect: Expect) {

@@ -442,6 +474,34 @@ fn main() {
    )
}
#[test]
fn custom_postfix_completion() {
check_edit_with_config(
CompletionConfig {
snippets: vec![Snippet::new(
&[],
&["break".into()],
&["ControlFlow::Break(${receiver})".into()],
"",
&["core::ops::ControlFlow".into()],
crate::SnippetScope::Expr,
)
.unwrap()],
..TEST_CONFIG
},
"break",
r#"
//- minicore: try
fn main() { 42.$0 }
"#,
r#"
use core::ops::ControlFlow;
fn main() { ControlFlow::Break(42) }
"#,
);
}
    #[test]
    fn postfix_completion_for_format_like_strings() {
        check_edit(

@@ -1,11 +1,12 @@
//! This file provides snippet completions, like `pd` => `eprintln!(...)`.

-use ide_db::helpers::SnippetCap;
+use hir::Documentation;
+use ide_db::helpers::{insert_use::ImportScope, SnippetCap};
use syntax::T;

use crate::{
    context::PathCompletionContext, item::Builder, CompletionContext, CompletionItem,
-   CompletionItemKind, CompletionKind, Completions,
+   CompletionItemKind, CompletionKind, Completions, SnippetScope,
};

fn snippet(ctx: &CompletionContext, cap: SnippetCap, label: &str, snippet: &str) -> Builder {

@@ -29,6 +30,10 @@ pub(crate) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) {
        None => return,
    };

+   if !ctx.config.snippets.is_empty() {
+       add_custom_completions(acc, ctx, cap, SnippetScope::Expr);
+   }
+
    if can_be_stmt {
        snippet(ctx, cap, "pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc);
        snippet(ctx, cap, "ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc);

@@ -52,6 +57,10 @@ pub(crate) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) {
        None => return,
    };

+   if !ctx.config.snippets.is_empty() {
+       add_custom_completions(acc, ctx, cap, SnippetScope::Item);
+   }
+
    let mut item = snippet(
        ctx,
        cap,

@@ -86,3 +95,66 @@ fn ${1:feature}() {
    let item = snippet(ctx, cap, "macro_rules", "macro_rules! $1 {\n\t($2) => {\n\t\t$0\n\t};\n}");
    item.add_to(acc);
}
fn add_custom_completions(
acc: &mut Completions,
ctx: &CompletionContext,
cap: SnippetCap,
scope: SnippetScope,
) -> Option<()> {
let import_scope =
ImportScope::find_insert_use_container_with_macros(&ctx.token.parent()?, &ctx.sema)?;
ctx.config.prefix_snippets().filter(|(_, snip)| snip.scope == scope).for_each(
|(trigger, snip)| {
let imports = match snip.imports(ctx, &import_scope) {
Some(imports) => imports,
None => return,
};
let body = snip.snippet();
let mut builder = snippet(ctx, cap, &trigger, &body);
builder.documentation(Documentation::new(format!("```rust\n{}\n```", body)));
for import in imports.into_iter() {
builder.add_import(import);
}
builder.detail(snip.description.as_deref().unwrap_or_default());
builder.add_to(acc);
},
);
None
}
#[cfg(test)]
mod tests {
use crate::{
tests::{check_edit_with_config, TEST_CONFIG},
CompletionConfig, Snippet,
};
#[test]
fn custom_snippet_completion() {
check_edit_with_config(
CompletionConfig {
snippets: vec![Snippet::new(
&["break".into()],
&[],
&["ControlFlow::Break(())".into()],
"",
&["core::ops::ControlFlow".into()],
crate::SnippetScope::Expr,
)
.unwrap()],
..TEST_CONFIG
},
"break",
r#"
//- minicore: try
fn main() { $0 }
"#,
r#"
use core::ops::ControlFlow;
fn main() { ControlFlow::Break(()) }
"#,
);
}
}


@@ -6,6 +6,8 @@
use ide_db::helpers::{insert_use::InsertUseConfig, SnippetCap};

+use crate::snippet::Snippet;
+
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CompletionConfig {
    pub enable_postfix_completions: bool,

@@ -15,4 +17,18 @@ pub struct CompletionConfig {
    pub add_call_argument_snippets: bool,
    pub snippet_cap: Option<SnippetCap>,
    pub insert_use: InsertUseConfig,
pub snippets: Vec<Snippet>,
}
impl CompletionConfig {
pub fn postfix_snippets(&self) -> impl Iterator<Item = (&str, &Snippet)> {
self.snippets.iter().flat_map(|snip| {
snip.postfix_triggers.iter().map(move |trigger| (trigger.as_str(), snip))
})
}
pub fn prefix_snippets(&self) -> impl Iterator<Item = (&str, &Snippet)> {
self.snippets.iter().flat_map(|snip| {
snip.prefix_triggers.iter().map(move |trigger| (trigger.as_str(), snip))
})
}
}


@@ -868,7 +868,8 @@ mod tests {
    fn check_expected_type_and_name(ra_fixture: &str, expect: Expect) {
        let (db, pos) = position(ra_fixture);
-       let completion_context = CompletionContext::new(&db, pos, &TEST_CONFIG).unwrap();
+       let config = TEST_CONFIG;
+       let completion_context = CompletionContext::new(&db, pos, &config).unwrap();
        let ty = completion_context
            .expected_type

@@ -11,6 +11,7 @@ use ide_db::{
    },
    SymbolKind,
};
+use smallvec::SmallVec;
use stdx::{format_to, impl_from, never};
use syntax::{algo, TextRange};
use text_edit::TextEdit;

@@ -76,7 +77,7 @@ pub struct CompletionItem {
    ref_match: Option<Mutability>,

    /// The import data to add to completion's edits.
-   import_to_add: Option<ImportEdit>,
+   import_to_add: SmallVec<[ImportEdit; 1]>,
}

// We use custom debug for CompletionItem to make snapshot tests more readable.

@@ -305,7 +306,7 @@ impl CompletionItem {
            trigger_call_info: None,
            relevance: CompletionRelevance::default(),
            ref_match: None,
-           import_to_add: None,
+           imports_to_add: Default::default(),
        }
    }

@@ -364,8 +365,8 @@ impl CompletionItem {
        self.ref_match.map(|mutability| (mutability, relevance))
    }

-   pub fn import_to_add(&self) -> Option<&ImportEdit> {
-       self.import_to_add.as_ref()
+   pub fn imports_to_add(&self) -> &[ImportEdit] {
+       &self.import_to_add
    }
}

@@ -398,7 +399,7 @@ impl ImportEdit {
pub(crate) struct Builder {
    source_range: TextRange,
    completion_kind: CompletionKind,
-   import_to_add: Option<ImportEdit>,
+   imports_to_add: SmallVec<[ImportEdit; 1]>,
    trait_name: Option<String>,
    label: String,
    insert_text: Option<String>,

@@ -422,14 +423,13 @@ impl Builder {
        let mut lookup = self.lookup;
        let mut insert_text = self.insert_text;

-       if let Some(original_path) = self
-           .import_to_add
-           .as_ref()
-           .and_then(|import_edit| import_edit.import.original_path.as_ref())
-       {
-           lookup = lookup.or_else(|| Some(label.clone()));
-           insert_text = insert_text.or_else(|| Some(label.clone()));
-           format_to!(label, " (use {})", original_path)
+       if let [import_edit] = &*self.imports_to_add {
+           // snippets can have multiple imports, but normal completions only have up to one
+           if let Some(original_path) = import_edit.import.original_path.as_ref() {
+               lookup = lookup.or_else(|| Some(label.clone()));
+               insert_text = insert_text.or_else(|| Some(label.clone()));
+               format_to!(label, " (use {})", original_path)
+           }
        } else if let Some(trait_name) = self.trait_name {
            insert_text = insert_text.or_else(|| Some(label.clone()));
            format_to!(label, " (as {})", trait_name)

@@ -456,7 +456,7 @@ impl Builder {
            trigger_call_info: self.trigger_call_info.unwrap_or(false),
            relevance: self.relevance,
            ref_match: self.ref_match,
-           import_to_add: self.import_to_add,
+           import_to_add: self.imports_to_add,
        }
    }

@@ -527,8 +527,8 @@ impl Builder {
        self.trigger_call_info = Some(true);
        self
    }
-   pub(crate) fn add_import(&mut self, import_to_add: Option<ImportEdit>) -> &mut Builder {
-       self.import_to_add = import_to_add;
+   pub(crate) fn add_import(&mut self, import_to_add: ImportEdit) -> &mut Builder {
+       self.imports_to_add.push(import_to_add);
        self
    }
    pub(crate) fn ref_match(&mut self, mutability: Mutability) -> &mut Builder {

@@ -9,16 +9,19 @@ mod render;
#[cfg(test)]
mod tests;
+mod snippet;

use completions::flyimport::position_for_import;
use ide_db::{
    base_db::FilePosition,
    helpers::{
-       import_assets::{LocatedImport, NameToImport},
-       insert_use::ImportScope,
+       import_assets::NameToImport,
+       insert_use::{self, ImportScope},
+       mod_path_to_ast,
    },
    items_locator, RootDatabase,
};
+use syntax::algo;
use text_edit::TextEdit;

use crate::{completions::Completions, context::CompletionContext, item::CompletionKind};

@@ -26,6 +29,7 @@ use crate::{completions::Completions, context::CompletionContext, item::CompletionKind};
pub use crate::{
    config::CompletionConfig,
    item::{CompletionItem, CompletionItemKind, CompletionRelevance, ImportEdit},
+   snippet::{Snippet, SnippetScope},
};

//FIXME: split the following feature into fine-grained features.

@@ -173,31 +177,37 @@ pub fn resolve_completion_edits(
    db: &RootDatabase,
    config: &CompletionConfig,
    position: FilePosition,
-   full_import_path: &str,
-   imported_name: String,
+   imports: impl IntoIterator<Item = (String, String)>,
) -> Option<Vec<TextEdit>> {
+   let _p = profile::span("resolve_completion_edits");
    let ctx = CompletionContext::new(db, position, config)?;
    let position_for_import = position_for_import(&ctx, None)?;
    let scope = ImportScope::find_insert_use_container_with_macros(position_for_import, &ctx.sema)?;

    let current_module = ctx.sema.scope(position_for_import).module()?;
    let current_crate = current_module.krate();
+   let new_ast = scope.clone_for_update();
+   let mut import_insert = TextEdit::builder();

-   let (import_path, item_to_import) = items_locator::items_with_name(
-       &ctx.sema,
-       current_crate,
-       NameToImport::Exact(imported_name),
-       items_locator::AssocItemSearch::Include,
-       Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
-   )
-   .filter_map(|candidate| {
-       current_module
-           .find_use_path_prefixed(db, candidate, config.insert_use.prefix_kind)
-           .zip(Some(candidate))
-   })
-   .find(|(mod_path, _)| mod_path.to_string() == full_import_path)?;
-   let import =
-       LocatedImport::new(import_path.clone(), item_to_import, item_to_import, Some(import_path));
-
-   ImportEdit { import, scope }.to_text_edit(config.insert_use).map(|edit| vec![edit])
+   // FIXME: lift out and make some tests here, this is ImportEdit::to_text_edit but changed to work with multiple edits
+   imports.into_iter().for_each(|(full_import_path, imported_name)| {
+       let items_with_name = items_locator::items_with_name(
+           &ctx.sema,
+           current_crate,
+           NameToImport::Exact(imported_name),
+           items_locator::AssocItemSearch::Include,
+           Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+       );
+       let import = items_with_name
+           .filter_map(|candidate| {
+               current_module.find_use_path_prefixed(db, candidate, config.insert_use.prefix_kind)
+           })
+           .find(|mod_path| mod_path.to_string() == full_import_path);
+       if let Some(import_path) = import {
+           insert_use::insert_use(&new_ast, mod_path_to_ast(&import_path), &config.insert_use);
+       }
+   });
+
+   algo::diff(scope.as_syntax_node(), new_ast.as_syntax_node()).into_text_edit(&mut import_insert);
+   Some(vec![import_insert.finish()])
}

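The resolve entry point above now takes the imports to insert as plain `(full_import_path, imported_name)` string pairs, so a single resolve request can add several imports at once. A minimal sketch of such an argument, with made-up paths:

```rust
fn main() {
    // One (full_import_path, imported_name) pair per import the completion wants to insert.
    let imports: Vec<(String, String)> = vec![
        ("core::ops::ControlFlow".to_string(), "ControlFlow".to_string()),
        ("std::sync::Arc".to_string(), "Arc".to_string()),
    ];

    // `resolve_completion_edits` accepts any `IntoIterator<Item = (String, String)>`,
    // so a `Vec` like this (or an iterator built from the LSP resolve data) works.
    assert_eq!(imports.len(), 2);
}
```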

@@ -212,7 +212,10 @@ fn render_resolution_(
            ctx.source_range(),
            local_name.to_string(),
        );
-       item.kind(CompletionItemKind::UnresolvedReference).add_import(import_to_add);
+       item.kind(CompletionItemKind::UnresolvedReference);
+       if let Some(import_to_add) = import_to_add {
+           item.add_import(import_to_add);
+       }
        return Some(item.build());
    }
};

@@ -258,9 +261,12 @@ fn render_resolution_(
        }
    }

    item.kind(kind)
-       .add_import(import_to_add)
        .set_documentation(scope_def_docs(ctx.db(), resolution))
        .set_deprecated(scope_def_is_deprecated(&ctx, resolution));
+   if let Some(import_to_add) = import_to_add {
+       item.add_import(import_to_add);
+   }

    Some(item.build())
}

@@ -68,9 +68,12 @@ impl<'a> EnumRender<'a> {
        item.kind(SymbolKind::Variant)
            .set_documentation(self.variant.docs(self.ctx.db()))
            .set_deprecated(self.ctx.is_deprecated(self.variant))
-           .add_import(import_to_add)
            .detail(self.detail());
+       if let Some(import_to_add) = import_to_add {
+           item.add_import(import_to_add);
+       }

        if self.variant_kind == hir::StructKind::Tuple {
            cov_mark::hit!(inserts_parens_for_tuple_enums);
            let params = Params::Anonymous(self.variant.fields(self.ctx.db()).len());

@@ -97,7 +97,10 @@ impl<'a> FunctionRender<'a> {
            }
        }

-       item.add_import(import_to_add).lookup_by(self.name);
+       if let Some(import_to_add) = import_to_add {
+           item.add_import(import_to_add);
+       }
+       item.lookup_by(self.name);

        let ret_type = self.func.ret_type(self.ctx.db());
        item.set_relevance(CompletionRelevance {

@@ -51,9 +51,12 @@ impl<'a> MacroRender<'a> {
        item.kind(SymbolKind::Macro)
            .set_documentation(self.docs.clone())
            .set_deprecated(self.ctx.is_deprecated(self.macro_))
-           .add_import(import_to_add)
            .set_detail(self.detail());
+       if let Some(import_to_add) = import_to_add {
+           item.add_import(import_to_add);
+       }

        let needs_bang = !(self.ctx.completion.in_use_tree()
            || matches!(self.ctx.completion.path_call_kind(), Some(CallKind::Mac)));
        let has_parens = self.ctx.completion.path_call_kind().is_some();

@ -0,0 +1,176 @@
//! User (postfix)-snippet definitions.
//!
//! Actual logic is implemented in [`crate::completions::postfix`] and [`crate::completions::snippet`].
// Feature: User Snippet Completions
//
// rust-analyzer allows users to define custom (postfix) snippets that may depend on items being accessible in the current scope in order to be applicable.
//
// A custom snippet can be defined by adding it to the `rust-analyzer.completion.snippets` object.
//
// [source,json]
// ----
// {
// "rust-analyzer.completion.snippets": {
// "thread spawn": {
// "prefix": ["spawn", "tspawn"],
// "body": [
// "thread::spawn(move || {",
// "\t$0",
// "});",
// ],
// "description": "Insert a thread::spawn call",
// "requires": "std::thread",
// "scope": "expr",
// }
// }
// }
// ----
//
// In the example above:
//
// * `"thread spawn"` is the name of the snippet.
//
// * `prefix` defines one or more trigger words that trigger the snippet completion.
// Using `postfix` will instead create a postfix snippet.
//
// * `body` is one or more lines of content joined via newlines for the final output.
//
// * `description` is an optional description of the snippet, if unset the snippet name will be used.
//
// * `requires` is an optional list of item paths that have to be resolvable in the current crate where the completion is rendered.
// If resolution fails, the snippet won't be applicable; otherwise, inserting the snippet will also insert an import for the items if
// they aren't yet in scope.
//
// * `scope` is an optional filter for when the snippet should be applicable. Possible values are:
// ** for Snippet-Scopes: `expr`, `item` (default: `item`)
// ** for Postfix-Snippet-Scopes: `expr`, `type` (default: `expr`)
//
// The `body` field also has access to placeholders as visible in the example as `$0`.
// These placeholders take the form of `$number` or `${number:placeholder_text}` which can be traversed as tabstops in ascending order starting from 1,
// with `$0` being a special case that always comes last.
//
// There is also a special placeholder, `${receiver}`, which will be replaced by the receiver expression for postfix snippets, or nothing in case of normal snippets.
// It does not act as a tabstop.
use ide_db::helpers::{import_assets::LocatedImport, insert_use::ImportScope};
use itertools::Itertools;
use syntax::ast;
use crate::{context::CompletionContext, ImportEdit};
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SnippetScope {
Item,
Expr,
Type,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Snippet {
pub postfix_triggers: Box<[String]>,
pub prefix_triggers: Box<[String]>,
pub scope: SnippetScope,
snippet: String,
pub description: Option<String>,
pub requires: Box<[String]>,
}
impl Snippet {
pub fn new(
prefix_triggers: &[String],
postfix_triggers: &[String],
snippet: &[String],
description: &str,
requires: &[String],
scope: SnippetScope,
) -> Option<Self> {
let (snippet, description) = validate_snippet(snippet, description, requires)?;
Some(Snippet {
// Box::into doesn't work as that has a Copy bound 😒
postfix_triggers: postfix_triggers.iter().cloned().collect(),
prefix_triggers: prefix_triggers.iter().cloned().collect(),
scope,
snippet,
description,
requires: requires.iter().cloned().collect(),
})
}
/// Returns None if the required items do not resolve.
pub(crate) fn imports(
&self,
ctx: &CompletionContext,
import_scope: &ImportScope,
) -> Option<Vec<ImportEdit>> {
import_edits(ctx, import_scope, &self.requires)
}
pub fn snippet(&self) -> String {
self.snippet.replace("${receiver}", "")
}
pub fn postfix_snippet(&self, receiver: &str) -> String {
self.snippet.replace("${receiver}", receiver)
}
pub fn is_item(&self) -> bool {
self.scope == SnippetScope::Item
}
pub fn is_expr(&self) -> bool {
self.scope == SnippetScope::Expr
}
}
fn import_edits(
ctx: &CompletionContext,
import_scope: &ImportScope,
requires: &[String],
) -> Option<Vec<ImportEdit>> {
let resolve = |import| {
let path = ast::Path::parse(import).ok()?;
let item = match ctx.scope.speculative_resolve(&path)? {
hir::PathResolution::Macro(mac) => mac.into(),
hir::PathResolution::Def(def) => def.into(),
_ => return None,
};
let path = ctx.scope.module()?.find_use_path_prefixed(
ctx.db,
item,
ctx.config.insert_use.prefix_kind,
)?;
Some((path.len() > 1).then(|| ImportEdit {
import: LocatedImport::new(path.clone(), item, item, None),
scope: import_scope.clone(),
}))
};
let mut res = Vec::with_capacity(requires.len());
for import in requires {
match resolve(import) {
Some(first) => res.extend(first),
None => return None,
}
}
Some(res)
}
fn validate_snippet(
snippet: &[String],
description: &str,
requires: &[String],
) -> Option<(String, Option<String>)> {
// validate that these are indeed simple paths
// we can't save the paths unfortunately due to them not being Send+Sync
if requires.iter().any(|path| match ast::Path::parse(path) {
Ok(path) => path.segments().any(|seg| {
!matches!(seg.kind(), Some(ast::PathSegmentKind::Name(_)))
|| seg.generic_arg_list().is_some()
}),
Err(_) => true,
}) {
return None;
}
let snippet = snippet.iter().join("\n");
let description = if description.is_empty() { None } else { Some(description.to_owned()) };
Some((snippet, description))
}

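To make the two placeholder substitutions above concrete, here is a hedged usage sketch of the new `Snippet` API (it assumes a crate inside the rust-analyzer workspace that depends on `ide_completion`, or on the re-exports from `ide`; the `arc` snippet is just an example):

```rust
use ide_completion::{Snippet, SnippetScope};

fn main() {
    // A postfix-only snippet with one required import, mirroring the test fixtures above.
    let snip = Snippet::new(
        /* prefix_triggers  */ &[],
        /* postfix_triggers */ &["arc".into()],
        /* snippet body     */ &["Arc::new(${receiver})".into()],
        /* description      */ "Put the expression into an `Arc`",
        /* requires         */ &["std::sync::Arc".into()],
        SnippetScope::Expr,
    )
    .expect("simple paths in `requires` should pass validation");

    // `${receiver}` is substituted with the receiver text for the postfix form...
    assert_eq!(snip.postfix_snippet("value"), "Arc::new(value)");
    // ...and dropped entirely for the prefix form.
    assert_eq!(snip.snippet(), "Arc::new()");
}
```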

@@ -74,6 +74,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
        group: true,
        skip_glob_imports: true,
    },
+   snippets: Vec::new(),
};

pub(crate) fn completion_list(ra_fixture: &str) -> String {

@@ -181,13 +182,15 @@ pub(crate) fn check_edit_with_config(
    let mut actual = db.file_text(position.file_id).to_string();

    let mut combined_edit = completion.text_edit().to_owned();
-   if let Some(import_text_edit) =
-       completion.import_to_add().and_then(|edit| edit.to_text_edit(config.insert_use))
-   {
-       combined_edit.union(import_text_edit).expect(
-           "Failed to apply completion resolve changes: change ranges overlap, but should not",
-       )
-   }
+   completion
+       .imports_to_add()
+       .iter()
+       .filter_map(|edit| edit.to_text_edit(config.insert_use))
+       .for_each(|text_edit| {
+           combined_edit.union(text_edit).expect(
+               "Failed to apply completion resolve changes: change ranges overlap, but should not",
+           )
+       });

    combined_edit.apply(&mut actual);
    assert_eq_text!(&ra_fixture_after, &actual)

@@ -12,7 +12,7 @@ use std::{ffi::OsString, iter, path::PathBuf};
use flycheck::FlycheckConfig;
use ide::{
    AssistConfig, CompletionConfig, DiagnosticsConfig, HighlightRelatedConfig, HoverConfig,
-   HoverDocFormat, InlayHintsConfig, JoinLinesConfig,
+   HoverDocFormat, InlayHintsConfig, JoinLinesConfig, Snippet, SnippetScope,
};
use ide_db::helpers::{
    insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},

@@ -112,6 +112,8 @@ config_data! {
        completion_addCallArgumentSnippets: bool = "true",
        /// Whether to add parenthesis when completing functions.
        completion_addCallParenthesis: bool = "true",
+       /// Custom completion snippets.
+       completion_snippets: FxHashMap<String, SnippetDef> = "{}",
        /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
        completion_postfix_enable: bool = "true",
        /// Toggles the additional completions that automatically add imports when completed.

@@ -277,9 +279,9 @@ config_data! {
        rustfmt_enableRangeFormatting: bool = "false",
        /// Workspace symbol search scope.
-       workspace_symbol_search_scope: WorskpaceSymbolSearchScopeDef = "\"workspace\"",
+       workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = "\"workspace\"",
        /// Workspace symbol search kind.
-       workspace_symbol_search_kind: WorskpaceSymbolSearchKindDef = "\"only_types\"",
+       workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = "\"only_types\"",
    }
}

@@ -296,6 +298,7 @@ pub struct Config {
    detached_files: Vec<AbsPathBuf>,
    pub discovered_projects: Option<Vec<ProjectManifest>>,
    pub root_path: AbsPathBuf,
+   snippets: Vec<Snippet>,
}

#[derive(Debug, Clone, Eq, PartialEq)]

@@ -431,6 +434,7 @@ impl Config {
            detached_files: Vec::new(),
            discovered_projects: None,
            root_path,
+           snippets: Default::default(),
        }
    }
    pub fn update(&mut self, mut json: serde_json::Value) {

@@ -443,6 +447,28 @@ impl Config {
            .map(AbsPathBuf::assert)
            .collect();
        self.data = ConfigData::from_json(json);
self.snippets.clear();
for (name, def) in self.data.completion_snippets.iter() {
if def.prefix.is_empty() && def.postfix.is_empty() {
continue;
}
let scope = match def.scope {
SnippetScopeDef::Expr => SnippetScope::Expr,
SnippetScopeDef::Type => SnippetScope::Type,
SnippetScopeDef::Item => SnippetScope::Item,
};
match Snippet::new(
&def.prefix,
&def.postfix,
&def.body,
def.description.as_ref().unwrap_or(name),
&def.requires,
scope,
) {
Some(snippet) => self.snippets.push(snippet),
None => tracing::info!("Invalid snippet {}", name),
}
}
    }

    pub fn json_schema() -> serde_json::Value {

@@ -778,6 +804,7 @@ impl Config {
                .snippet_support?,
                false
            )),
+           snippets: self.snippets.clone(),
        }
    }

    pub fn assist(&self) -> AssistConfig {

@@ -848,14 +875,14 @@ impl Config {
    pub fn workspace_symbol(&self) -> WorkspaceSymbolConfig {
        WorkspaceSymbolConfig {
            search_scope: match self.data.workspace_symbol_search_scope {
-               WorskpaceSymbolSearchScopeDef::Workspace => WorkspaceSymbolSearchScope::Workspace,
-               WorskpaceSymbolSearchScopeDef::WorkspaceAndDependencies => {
+               WorkspaceSymbolSearchScopeDef::Workspace => WorkspaceSymbolSearchScope::Workspace,
+               WorkspaceSymbolSearchScopeDef::WorkspaceAndDependencies => {
                    WorkspaceSymbolSearchScope::WorkspaceAndDependencies
                }
            },
            search_kind: match self.data.workspace_symbol_search_kind {
-               WorskpaceSymbolSearchKindDef::OnlyTypes => WorkspaceSymbolSearchKind::OnlyTypes,
-               WorskpaceSymbolSearchKindDef::AllSymbols => WorkspaceSymbolSearchKind::AllSymbols,
+               WorkspaceSymbolSearchKindDef::OnlyTypes => WorkspaceSymbolSearchKind::OnlyTypes,
+               WorkspaceSymbolSearchKindDef::AllSymbols => WorkspaceSymbolSearchKind::AllSymbols,
            },
        }
    }

@@ -908,6 +935,66 @@ impl Config {
    }
}
#[derive(Deserialize, Debug, Clone, Copy)]
#[serde(rename_all = "snake_case")]
enum SnippetScopeDef {
Expr,
Item,
Type,
}
impl Default for SnippetScopeDef {
fn default() -> Self {
SnippetScopeDef::Expr
}
}
#[derive(Deserialize, Debug, Clone, Default)]
#[serde(default)]
struct SnippetDef {
#[serde(deserialize_with = "single_or_array")]
prefix: Vec<String>,
#[serde(deserialize_with = "single_or_array")]
postfix: Vec<String>,
description: Option<String>,
#[serde(deserialize_with = "single_or_array")]
body: Vec<String>,
#[serde(deserialize_with = "single_or_array")]
requires: Vec<String>,
scope: SnippetScopeDef,
}
fn single_or_array<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
where
D: serde::Deserializer<'de>,
{
struct SingleOrVec;
impl<'de> serde::de::Visitor<'de> for SingleOrVec {
type Value = Vec<String>;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("string or array of strings")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(vec![value.to_owned()])
}
fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
where
A: serde::de::SeqAccess<'de>,
{
Deserialize::deserialize(serde::de::value::SeqAccessDeserializer::new(seq))
}
}
deserializer.deserialize_any(SingleOrVec)
}
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ManifestOrProjectJson {

@@ -939,14 +1026,14 @@ enum ImportPrefixDef {
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
-enum WorskpaceSymbolSearchScopeDef {
+enum WorkspaceSymbolSearchScopeDef {
    Workspace,
    WorkspaceAndDependencies,
}

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
-enum WorskpaceSymbolSearchKindDef {
+enum WorkspaceSymbolSearchKindDef {
    OnlyTypes,
    AllSymbols,
}

@@ -1077,6 +1164,9 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json::Value {
        "items": { "type": "string" },
        "uniqueItems": true,
    },
+   "FxHashMap<String, SnippetDef>" => set! {
+       "type": "object",
+   },
    "FxHashMap<String, String>" => set! {
        "type": "object",
    },

@@ -1133,7 +1223,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json::Value {
        "type": "array",
        "items": { "type": ["string", "object"] },
    },
-   "WorskpaceSymbolSearchScopeDef" => set! {
+   "WorkspaceSymbolSearchScopeDef" => set! {
        "type": "string",
        "enum": ["workspace", "workspace_and_dependencies"],
        "enumDescriptions": [

@@ -1141,7 +1231,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json::Value {
            "Search in current workspace and dependencies"
        ],
    },
-   "WorskpaceSymbolSearchKindDef" => set! {
+   "WorkspaceSymbolSearchKindDef" => set! {
        "type": "string",
        "enum": ["only_types", "all_symbols"],
        "enumDescriptions": [

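Because every list-like field of `SnippetDef` goes through the `single_or_array` deserializer above, a bare string and an array of strings are interchangeable in user settings. A sketch of a definition exercising both spellings (it mirrors the `thread spawn` example from the snippet documentation):

```json
{
    "rust-analyzer.completion.snippets": {
        "thread spawn": {
            "prefix": "spawn",
            "postfix": ["spawn", "tspawn"],
            "body": [
                "thread::spawn(move || {",
                "\t$0",
                "});"
            ],
            "description": "Insert a thread::spawn call",
            "requires": "std::thread",
            "scope": "expr"
        }
    }
}
```

Here `"prefix": "spawn"` deserializes to the same `Vec<String>` as `"prefix": ["spawn"]`, and any omitted field falls back to its `#[serde(default)]` value.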

@@ -785,8 +785,10 @@ pub(crate) fn handle_completion_resolve(
        .resolve_completion_edits(
            &snap.config.completion(),
            FilePosition { file_id, offset },
-           &resolve_data.full_import_path,
-           resolve_data.imported_name,
+           resolve_data
+               .imports
+               .into_iter()
+               .map(|import| (import.full_import_path, import.imported_name)),
        )?
        .into_iter()
        .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))

@@ -144,6 +144,7 @@ fn integrated_completion_benchmark() {
            group: true,
            skip_glob_imports: true,
        },
+       snippets: Vec::new(),
    };
    let position =
        FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };

@@ -180,6 +181,7 @@ fn integrated_completion_benchmark() {
            group: true,
            skip_glob_imports: true,
        },
+       snippets: Vec::new(),
    };
    let position =
        FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };

@@ -520,6 +520,11 @@ pub enum WorkspaceSymbolSearchKind {
#[derive(Debug, Serialize, Deserialize)]
pub struct CompletionResolveData {
    pub position: lsp_types::TextDocumentPositionParams,
+   pub imports: Vec<CompletionImport>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CompletionImport {
    pub full_import_path: String,
    pub imported_name: String,
}

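For illustration, the `data` attached to a completion item now carries a list of imports rather than a single path/name pair. A hypothetical serialized payload could look roughly like this (URI and position are made up, and the exact shape of `position` follows `lsp_types`' camelCase serialization):

```json
{
    "position": {
        "textDocument": { "uri": "file:///project/src/main.rs" },
        "position": { "line": 4, "character": 11 }
    },
    "imports": [
        { "full_import_path": "core::ops::ControlFlow", "imported_name": "ControlFlow" }
    ]
}
```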

@@ -270,14 +270,20 @@ fn completion_item(
        lsp_item.insert_text_format = Some(lsp_types::InsertTextFormat::Snippet);
    }
    if config.completion().enable_imports_on_the_fly {
-       if let Some(import_edit) = item.import_to_add() {
-           let import_path = &import_edit.import.import_path;
-           if let Some(import_name) = import_path.segments().last() {
-               let data = lsp_ext::CompletionResolveData {
-                   position: tdpp.clone(),
-                   full_import_path: import_path.to_string(),
-                   imported_name: import_name.to_string(),
-               };
+       if let imports @ [_, ..] = item.imports_to_add() {
+           let imports: Vec<_> = imports
+               .iter()
+               .filter_map(|import_edit| {
+                   let import_path = &import_edit.import.import_path;
+                   let import_name = import_path.segments().last()?;
+                   Some(lsp_ext::CompletionImport {
+                       full_import_path: import_path.to_string(),
+                       imported_name: import_name.to_string(),
+                   })
+               })
+               .collect();
+           if !imports.is_empty() {
+               let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports };
                lsp_item.data = Some(to_value(data).unwrap());
            }
        }

@@ -1,5 +1,5 @@
<!---
-lsp_ext.rs hash: ad52054176909945
+lsp_ext.rs hash: c6568e4035333f3a

If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:

@@ -136,6 +136,11 @@ Only applies when `#rust-analyzer.completion.addCallParenthesis#` is set.
--
Whether to add parenthesis when completing functions.
--
+[[rust-analyzer.completion.snippets]]rust-analyzer.completion.snippets (default: `{}`)::
++
+--
+Custom completion snippets.
+--
[[rust-analyzer.completion.postfix.enable]]rust-analyzer.completion.postfix.enable (default: `true`)::
+
--

@@ -585,6 +585,11 @@
    "default": true,
    "type": "boolean"
},
+"rust-analyzer.completion.snippets": {
+    "markdownDescription": "Custom completion snippets.",
+    "default": {},
+    "type": "object"
+},
"rust-analyzer.completion.postfix.enable": {
    "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc.",
    "default": true,