Auto merge of #12580 - Veykril:completion, r=Veykril

internal: Move more things out of `CompletionContext::function_def` into more specific parts
bors 2022-06-18 09:21:13 +00:00
commit 817f46bc6c
9 changed files with 226 additions and 112 deletions
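The commit title describes the pattern of the change rather than any single hunk: state that only some completion paths need is moved off the broad `CompletionContext` (its `function_def`, `impl_def`, and `incomplete_let` fields are deleted below) and onto the narrower contexts that actually consume it, namely `PathKind::Expr` (which gains `incomplete_let`, `self_param`, `innermost_ret_ty`, and `impl_`), `PatternContext` (which gains `impl_`), and `ItemListKind::TraitImpl` (which now carries an `Option<ast::Impl>`). A minimal, self-contained sketch of that pattern, using hypothetical names rather than rust-analyzer's real types:

    // Hypothetical illustration of "move state onto the context that uses it".
    struct BroadCtx {
        // Before: a flag every completion routine could see, though only
        // expression completion ever reads it.
        // incomplete_let: bool,
        kind: Kind,
    }

    enum Kind {
        // After: only the expression path carries the flag.
        Expr { incomplete_let: bool },
        Pattern,
    }

    fn complete_expr(ctx: &BroadCtx) {
        // Consumers destructure the narrow context instead of reaching into BroadCtx.
        if let Kind::Expr { incomplete_let } = ctx.kind {
            if incomplete_let {
                // Mirrors the behaviour in the first hunk below: block snippets get a
                // trailing semicolon when completing inside an incomplete `let`.
                println!("complete block snippet with a trailing `;`");
            }
        }
    }

    fn main() {
        let ctx = BroadCtx { kind: Kind::Expr { incomplete_let: true } };
        complete_expr(&ctx);
    }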

View file

@@ -24,6 +24,7 @@ use std::iter;
use hir::{db::HirDatabase, known, ScopeDef};
use ide_db::SymbolKind;
use syntax::ast;
use crate::{
context::Visible,
@@ -110,12 +111,18 @@ impl Completions {
["self", "super", "crate"].into_iter().for_each(|kw| self.add_keyword(ctx, kw));
}
pub(crate) fn add_keyword_snippet(&mut self, ctx: &CompletionContext, kw: &str, snippet: &str) {
pub(crate) fn add_keyword_snippet_expr(
&mut self,
ctx: &CompletionContext,
kw: &str,
snippet: &str,
incomplete_let: bool,
) {
let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw);
match ctx.config.snippet_cap {
Some(cap) => {
if snippet.ends_with('}') && ctx.incomplete_let {
if snippet.ends_with('}') && incomplete_let {
// complete block expression snippets with a trailing semicolon, if inside an incomplete let
cov_mark::hit!(let_semi);
item.insert_snippet(cap, format!("{};", snippet));
@@ -130,6 +137,16 @@ impl Completions {
item.add_to(self);
}
pub(crate) fn add_keyword_snippet(&mut self, ctx: &CompletionContext, kw: &str, snippet: &str) {
let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw);
match ctx.config.snippet_cap {
Some(cap) => item.insert_snippet(cap, snippet),
None => item.insert_text(if snippet.contains('$') { kw } else { snippet }),
};
item.add_to(self);
}
pub(crate) fn add_crate_roots(&mut self, ctx: &CompletionContext) {
ctx.process_all_names(&mut |name, res| match res {
ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) if m.is_crate_root(ctx.db) => {
@@ -393,11 +410,12 @@ fn enum_variants_with_paths(
acc: &mut Completions,
ctx: &CompletionContext,
enum_: hir::Enum,
impl_: &Option<ast::Impl>,
cb: impl Fn(&mut Completions, &CompletionContext, hir::Variant, hir::ModPath),
) {
let variants = enum_.variants(ctx.db);
if let Some(impl_) = ctx.impl_def.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) {
if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) {
if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) {
for &variant in &variants {
let self_path = hir::ModPath::from_segments(

View file

@@ -46,27 +46,26 @@ pub(crate) fn complete_undotted_self(
if !ctx.config.enable_self_on_the_fly {
return;
}
match path_ctx {
PathCompletionCtx { qualified: Qualified::No, kind: PathKind::Expr { .. }, .. }
if path_ctx.is_trivial_path() && ctx.qualifier_ctx.none() => {}
let self_param = match path_ctx {
PathCompletionCtx {
qualified: Qualified::No,
kind: PathKind::Expr { self_param: Some(self_param), .. },
..
} if path_ctx.is_trivial_path() && ctx.qualifier_ctx.none() => self_param,
_ => return,
}
};
if let Some(func) = ctx.function_def.as_ref().and_then(|fn_| ctx.sema.to_def(fn_)) {
if let Some(self_) = func.self_param(ctx.db) {
let ty = self_.ty(ctx.db);
complete_fields(
acc,
ctx,
&ty,
|acc, field, ty| acc.add_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
|acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
);
complete_methods(ctx, &ty, |func| {
acc.add_method(ctx, func, Some(hir::known::SELF_PARAM), None)
});
}
}
let ty = self_param.ty(ctx.db);
complete_fields(
acc,
ctx,
&ty,
|acc, field, ty| acc.add_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
|acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
);
complete_methods(ctx, &ty, |func| {
acc.add_method(ctx, func, Some(hir::known::SELF_PARAM), None)
});
}
fn complete_fields(

View file

@@ -14,7 +14,9 @@ pub(crate) fn complete_expr_path(
path_ctx: &PathCompletionCtx,
) {
let _p = profile::span("complete_expr_path");
if !ctx.qualifier_ctx.none() {
return;
}
let (
qualified,
in_block_expr,
@@ -23,6 +25,9 @@ pub(crate) fn complete_expr_path(
after_if_expr,
wants_mut_token,
in_condition,
ty,
incomplete_let,
impl_,
) = match path_ctx {
&PathCompletionCtx {
kind:
@@ -31,12 +36,16 @@ pub(crate) fn complete_expr_path(
in_loop_body,
after_if_expr,
in_condition,
incomplete_let,
ref ref_expr_parent,
ref is_func_update,
ref innermost_ret_ty,
ref impl_,
..
},
ref qualified,
..
} if ctx.qualifier_ctx.none() => (
} => (
qualified,
in_block_expr,
in_loop_body,
@@ -44,6 +53,9 @@ pub(crate) fn complete_expr_path(
after_if_expr,
ref_expr_parent.as_ref().map(|it| it.mut_token().is_none()).unwrap_or(false),
in_condition,
innermost_ret_ty,
incomplete_let,
impl_,
),
_ => return,
};
@@ -172,8 +184,7 @@ pub(crate) fn complete_expr_path(
if let Some(adt) =
ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
{
let self_ty =
(|| ctx.sema.to_def(ctx.impl_def.as_ref()?)?.self_ty(ctx.db).as_adt())();
let self_ty = (|| ctx.sema.to_def(impl_.as_ref()?)?.self_ty(ctx.db).as_adt())();
let complete_self = self_ty == Some(adt);
match adt {
@@ -201,9 +212,15 @@ pub(crate) fn complete_expr_path(
}
}
hir::Adt::Enum(e) => {
super::enum_variants_with_paths(acc, ctx, e, |acc, ctx, variant, path| {
acc.add_qualified_enum_variant(ctx, variant, path)
});
super::enum_variants_with_paths(
acc,
ctx,
e,
impl_,
|acc, ctx, variant, path| {
acc.add_qualified_enum_variant(ctx, variant, path)
},
);
}
}
}
@@ -214,7 +231,8 @@ pub(crate) fn complete_expr_path(
});
if !is_func_update {
let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
let mut add_keyword =
|kw, snippet| acc.add_keyword_snippet_expr(ctx, kw, snippet, incomplete_let);
if !in_block_expr {
add_keyword("unsafe", "unsafe {\n $0\n}");
@@ -252,10 +270,10 @@ pub(crate) fn complete_expr_path(
}
}
if let Some(fn_def) = &ctx.function_def {
if let Some(ty) = ty {
add_keyword(
"return",
match (in_block_expr, fn_def.ret_type().is_some()) {
match (in_block_expr, ty.is_unit()) {
(true, true) => "return ;",
(true, false) => "return;",
(false, true) => "return $0",

View file

@@ -24,8 +24,8 @@ pub(crate) fn complete_fn_param(
ctx: &CompletionContext,
pattern_ctx: &PatternContext,
) -> Option<()> {
let (param_list, _, param_kind) = match pattern_ctx {
PatternContext { param_ctx: Some(kind), .. } => kind,
let ((param_list, _, param_kind), impl_) = match pattern_ctx {
PatternContext { param_ctx: Some(kind), impl_, .. } => (kind, impl_),
_ => return None,
};
@@ -45,7 +45,7 @@
match param_kind {
ParamKind::Function(function) => {
fill_fn_params(ctx, function, param_list, add_new_item_to_acc);
fill_fn_params(ctx, function, param_list, impl_, add_new_item_to_acc);
}
ParamKind::Closure(closure) => {
let stmt_list = closure.syntax().ancestors().find_map(ast::StmtList::cast)?;
@@ -62,6 +62,7 @@ fn fill_fn_params(
ctx: &CompletionContext,
function: &ast::Fn,
param_list: &ast::ParamList,
impl_: &Option<ast::Impl>,
mut add_new_item_to_acc: impl FnMut(&str),
) {
let mut file_params = FxHashMap::default();
@@ -104,7 +105,7 @@ fn fill_fn_params(
}
remove_duplicated(&mut file_params, param_list.params());
let self_completion_items = ["self", "&self", "mut self", "&mut self"];
if should_add_self_completions(ctx, param_list) {
if should_add_self_completions(param_list, impl_) {
self_completion_items.into_iter().for_each(|self_item| add_new_item_to_acc(self_item));
}
@@ -155,11 +156,10 @@ fn remove_duplicated(
})
}
fn should_add_self_completions(ctx: &CompletionContext, param_list: &ast::ParamList) -> bool {
let inside_impl = ctx.impl_def.is_some();
fn should_add_self_completions(param_list: &ast::ParamList, impl_: &Option<ast::Impl>) -> bool {
let no_params = param_list.params().next().is_none() && param_list.self_param().is_none();
inside_impl && no_params
impl_.is_some() && no_params
}
fn comma_wrapper(ctx: &CompletionContext) -> Option<(impl Fn(&str) -> String, TextRange)> {

View file

@@ -66,7 +66,7 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext, kind: Option<&It
let in_assoc_non_trait_impl = matches!(kind, Some(ItemListKind::Impl | ItemListKind::Trait));
let in_extern_block = matches!(kind, Some(ItemListKind::ExternBlock));
let in_trait = matches!(kind, Some(ItemListKind::Trait));
let in_trait_impl = matches!(kind, Some(ItemListKind::TraitImpl));
let in_trait_impl = matches!(kind, Some(ItemListKind::TraitImpl(_)));
let in_inherent_impl = matches!(kind, Some(ItemListKind::Impl));
let no_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
let in_block = matches!(kind, None);

View file

@@ -81,7 +81,7 @@ pub(crate) fn complete_trait_impl_name(
kind,
replacement_range(ctx, &item),
// item -> ASSOC_ITEM_LIST -> IMPL
ast::Impl::cast(item.parent()?.parent()?)?,
&ast::Impl::cast(item.parent()?.parent()?)?,
);
Some(())
}
@@ -97,7 +97,7 @@ pub(crate) fn complete_trait_impl_name_ref(
kind:
NameRefKind::Path(
path_ctx @ PathCompletionCtx {
kind: PathKind::Item { kind: ItemListKind::TraitImpl },
kind: PathKind::Item { kind: ItemListKind::TraitImpl(Some(impl_)) },
..
},
),
@@ -109,7 +109,7 @@ pub(crate) fn complete_trait_impl_name_ref(
Some(name) => name.syntax().text_range(),
None => ctx.source_range(),
},
ctx.impl_def.clone()?,
impl_,
),
_ => (),
}
@@ -121,10 +121,10 @@ fn complete_trait_impl(
ctx: &CompletionContext,
kind: ImplCompletionKind,
replacement_range: TextRange,
impl_def: ast::Impl,
impl_def: &ast::Impl,
) {
if let Some(hir_impl) = ctx.sema.to_def(&impl_def) {
get_missing_assoc_items(&ctx.sema, &impl_def).into_iter().for_each(|item| {
if let Some(hir_impl) = ctx.sema.to_def(impl_def) {
get_missing_assoc_items(&ctx.sema, impl_def).into_iter().for_each(|item| {
use self::ImplCompletionKind::*;
match (item, kind) {
(hir::AssocItem::Function(func), All | Fn) => {

View file

@@ -51,9 +51,15 @@ pub(crate) fn complete_pattern(
ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
{
if refutable || single_variant_enum(e) {
super::enum_variants_with_paths(acc, ctx, e, |acc, ctx, variant, path| {
acc.add_qualified_variant_pat(ctx, variant, path);
});
super::enum_variants_with_paths(
acc,
ctx,
e,
&patctx.impl_,
|acc, ctx, variant, path| {
acc.add_qualified_variant_pat(ctx, variant, path);
},
);
}
}

View file

@@ -93,8 +93,12 @@ pub(super) enum PathKind {
after_if_expr: bool,
/// Whether this expression is the direct condition of an if or while expression
in_condition: bool,
incomplete_let: bool,
ref_expr_parent: Option<ast::RefExpr>,
is_func_update: Option<ast::RecordExpr>,
self_param: Option<hir::SelfParam>,
innermost_ret_ty: Option<hir::Type>,
impl_: Option<ast::Impl>,
},
Type {
location: TypeLocation,
@@ -140,12 +144,12 @@ pub(crate) enum TypeAscriptionTarget {
}
/// The kind of item list a [`PathKind::Item`] belongs to.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq)]
pub(super) enum ItemListKind {
SourceFile,
Module,
Impl,
TraitImpl,
TraitImpl(Option<ast::Impl>),
Trait,
ExternBlock,
}
@@ -176,6 +180,7 @@ pub(super) struct PatternContext {
pub(super) mut_token: Option<SyntaxToken>,
/// The record pattern this name or ref is a field of
pub(super) record_pat: Option<ast::RecordPat>,
pub(super) impl_: Option<ast::Impl>,
}
/// The state of the lifetime we are completing.
@@ -317,16 +322,6 @@ pub(crate) struct CompletionContext<'a> {
/// The expected type of what we are completing.
pub(super) expected_type: Option<Type>,
/// The parent function of the cursor position if it exists.
// FIXME: This probably doesn't belong here
pub(super) function_def: Option<ast::Fn>,
/// The parent impl of the cursor position if it exists.
// FIXME: This probably doesn't belong here
pub(super) impl_def: Option<ast::Impl>,
/// Are we completing inside a let statement with a missing semicolon?
// FIXME: This should be part of PathKind::Expr
pub(super) incomplete_let: bool,
// FIXME: This shouldn't exist
pub(super) previous_token: Option<SyntaxToken>,
@@ -500,9 +495,6 @@ impl<'a> CompletionContext<'a> {
module,
expected_name: None,
expected_type: None,
function_def: None,
impl_def: None,
incomplete_let: false,
previous_token: None,
// dummy value, will be overwritten
ident_ctx: IdentContext::UnexpandedAttrTT { fake_attribute_under_caret: None },

View file

@@ -330,11 +330,6 @@ impl<'a> CompletionContext<'a> {
self.previous_token =
syntax_element.clone().into_token().and_then(previous_non_trivia_token);
self.incomplete_let =
syntax_element.ancestors().take(6).find_map(ast::LetStmt::cast).map_or(false, |it| {
it.syntax().text_range().end() == syntax_element.text_range().end()
});
(self.expected_type, self.expected_name) = self.expected_type_and_name();
// Overwrite the path kind for derives
@@ -394,28 +389,6 @@ impl<'a> CompletionContext<'a> {
return Some(());
}
};
self.impl_def = self
.sema
.token_ancestors_with_macros(self.token.clone())
.take_while(|it| it.kind() != SyntaxKind::SOURCE_FILE)
.filter_map(ast::Item::cast)
.take(2)
.find_map(|it| match it {
ast::Item::Impl(impl_) => Some(impl_),
_ => None,
});
self.function_def = self
.sema
.token_ancestors_with_macros(self.token.clone())
.take_while(|it| {
it.kind() != SyntaxKind::SOURCE_FILE && it.kind() != SyntaxKind::MODULE
})
.filter_map(ast::Item::cast)
.take(2)
.find_map(|it| match it {
ast::Item::Fn(fn_) => Some(fn_),
_ => None,
});
match name_like {
ast::NameLike::Lifetime(lifetime) => {
@@ -469,7 +442,7 @@ impl<'a> CompletionContext<'a> {
}
fn classify_name(
_sema: &Semantics<RootDatabase>,
sema: &Semantics<RootDatabase>,
original_file: &SyntaxNode,
name: ast::Name,
) -> Option<NameContext> {
@@ -481,9 +454,9 @@ impl<'a> CompletionContext<'a> {
ast::Enum(_) => NameKind::Enum,
ast::Fn(_) => NameKind::Function,
ast::IdentPat(bind_pat) => {
let mut pat_ctx = pattern_context_for(original_file, bind_pat.into());
let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
pat_ctx.record_pat = find_node_in_file_compensated(original_file, &record_field.parent_record_pat());
pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
}
NameKind::IdentPat(pat_ctx)
@@ -520,9 +493,13 @@ impl<'a> CompletionContext<'a> {
|kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
return find_node_in_file_compensated(original_file, &record_field.parent_record_lit())
.map(NameRefKind::RecordExpr)
.map(make_res);
return find_node_in_file_compensated(
sema,
original_file,
&record_field.parent_record_lit(),
)
.map(NameRefKind::RecordExpr)
.map(make_res);
}
if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
let kind = NameRefKind::Pattern(PatternContext {
@@ -531,10 +508,12 @@ impl<'a> CompletionContext<'a> {
ref_token: None,
mut_token: None,
record_pat: find_node_in_file_compensated(
sema,
original_file,
&record_field.parent_record_pat(),
),
..pattern_context_for(
sema,
original_file,
record_field.parent_record_pat().clone().into(),
)
@@ -594,7 +573,7 @@ impl<'a> CompletionContext<'a> {
};
let func_update_record = |syn: &SyntaxNode| {
if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
find_node_in_file_compensated(original_file, &record_expr)
find_node_in_file_compensated(sema, original_file, &record_expr)
} else {
None
}
@@ -696,9 +675,9 @@ impl<'a> CompletionContext<'a> {
ast::TypeBound(_) => TypeLocation::TypeBound,
// is this case needed?
ast::TypeBoundList(_) => TypeLocation::TypeBound,
ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
// is this case needed?
ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(original_file, Some(it))),
ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
ast::TupleField(_) => TypeLocation::TupleField,
_ => return None,
}
@@ -727,8 +706,63 @@ impl<'a> CompletionContext<'a> {
let after_if_expr = after_if_expr(it.clone());
let ref_expr_parent =
path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
let (innermost_ret_ty, self_param) = {
let find_ret_ty = |it: SyntaxNode| {
if let Some(item) = ast::Item::cast(it.clone()) {
match item {
ast::Item::Fn(f) => {
Some(sema.to_def(&f).map(|it| it.ret_type(sema.db)))
}
ast::Item::MacroCall(_) => None,
_ => Some(None),
}
} else {
let expr = ast::Expr::cast(it)?;
let callable = match expr {
// FIXME
// ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
_ => return None,
};
Some(
callable
.and_then(|c| c.adjusted().as_callable(sema.db))
.map(|it| it.return_type()),
)
}
};
let find_fn_self_param = |it| match it {
ast::Item::Fn(fn_) => {
Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db)))
}
ast::Item::MacroCall(_) => None,
_ => Some(None),
};
match find_node_in_file_compensated(sema, original_file, &expr) {
Some(it) => {
let innermost_ret_ty = sema
.ancestors_with_macros(it.syntax().clone())
.find_map(find_ret_ty)
.flatten();
let self_param = sema
.ancestors_with_macros(it.syntax().clone())
.filter_map(ast::Item::cast)
.find_map(find_fn_self_param)
.flatten();
(innermost_ret_ty, self_param)
}
None => (None, None),
}
};
let is_func_update = func_update_record(it);
let in_condition = is_in_condition(&expr);
let incomplete_let = it
.parent()
.and_then(ast::LetStmt::cast)
.map_or(false, |it| it.semicolon_token().is_none());
let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
PathKind::Expr {
in_block_expr,
@@ -737,6 +771,10 @@ impl<'a> CompletionContext<'a> {
in_condition,
ref_expr_parent,
is_func_update,
innermost_ret_ty,
self_param,
incomplete_let,
impl_,
}
};
let make_path_kind_type = |ty: ast::Type| {
@@ -764,14 +802,14 @@ impl<'a> CompletionContext<'a> {
},
ast::TupleStructPat(it) => {
path_ctx.has_call_parens = true;
PathKind::Pat { pat_ctx: pattern_context_for(original_file, it.into())}
PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
},
ast::RecordPat(it) => {
path_ctx.has_call_parens = true;
PathKind::Pat { pat_ctx: pattern_context_for(original_file, it.into())}
PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
},
ast::PathPat(it) => {
PathKind::Pat { pat_ctx: pattern_context_for(original_file, it.into())}
PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
},
ast::MacroCall(it) => {
// A macro call in this position is usually a result of parsing recovery, so check that
@@ -785,7 +823,7 @@ impl<'a> CompletionContext<'a> {
match_ast! {
match parent {
ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(original_file, it.into())},
ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
ast::MacroType(ty) => make_path_kind_type(ty.into()),
ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
@@ -793,7 +831,7 @@ impl<'a> CompletionContext<'a> {
match it {
ast::Trait(_) => ItemListKind::Trait,
ast::Impl(it) => if it.trait_().is_some() {
ItemListKind::TraitImpl
ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
} else {
ItemListKind::Impl
},
@@ -930,7 +968,11 @@ impl<'a> CompletionContext<'a> {
}
}
fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternContext {
fn pattern_context_for(
sema: &Semantics<RootDatabase>,
original_file: &SyntaxNode,
pat: ast::Pat,
) -> PatternContext {
let mut is_param = None;
let (refutability, has_type_ascription) =
pat
@@ -946,7 +988,7 @@ fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternCont
let has_type_ascription = param.ty().is_some();
is_param = (|| {
let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
let param_list = find_node_in_file_compensated(original_file, &fake_param_list)?;
let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
let param_list_owner = param_list.syntax().parent()?;
let kind = match_ast! {
match param_list_owner {
@@ -971,6 +1013,7 @@ fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternCont
ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
_ => (None, None),
};
PatternContext {
refutability,
param_ctx: is_param,
@@ -979,6 +1022,27 @@ fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternCont
mut_token,
ref_token,
record_pat: None,
impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
}
}
fn fetch_immediate_impl(
sema: &Semantics<RootDatabase>,
original_file: &SyntaxNode,
node: &SyntaxNode,
) -> Option<ast::Impl> {
let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
.filter_map(ast::Item::cast)
.filter(|it| !matches!(it, ast::Item::MacroCall(_)));
match ancestors.next()? {
ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
ast::Item::Impl(it) => return Some(it),
_ => return None,
}
match ancestors.next()? {
ast::Item::Impl(it) => Some(it),
_ => None,
}
}
@@ -1000,9 +1064,21 @@ fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
/// for the offset introduced by the fake ident.
/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
fn find_node_in_file_compensated<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
let syntax_range = syntax.text_range();
let range = node.syntax().text_range();
fn find_node_in_file_compensated<N: AstNode>(
sema: &Semantics<RootDatabase>,
in_file: &SyntaxNode,
node: &N,
) -> Option<N> {
ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
}
fn ancestors_in_file_compensated<'sema>(
sema: &'sema Semantics<RootDatabase>,
in_file: &SyntaxNode,
node: &SyntaxNode,
) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
let syntax_range = in_file.text_range();
let range = node.text_range();
let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
if end < range.start() {
return None;
@@ -1010,17 +1086,22 @@ fn find_node_in_file_compensated<N: AstNode>(syntax: &SyntaxNode, node: &N) -> O
let range = TextRange::new(range.start(), end);
// our inserted ident could cause `range` to go outside of the original syntax, so cap it
let intersection = range.intersect(syntax_range)?;
syntax.covering_element(intersection).ancestors().find_map(N::cast)
let node = match in_file.covering_element(intersection) {
NodeOrToken::Node(node) => node,
NodeOrToken::Token(tok) => tok.parent()?,
};
Some(sema.ancestors_with_macros(node))
}
/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
/// for the offset introduced by the fake ident..
/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
fn find_opt_node_in_file_compensated<N: AstNode>(
sema: &Semantics<RootDatabase>,
syntax: &SyntaxNode,
node: Option<N>,
) -> Option<N> {
find_node_in_file_compensated(syntax, &node?)
find_node_in_file_compensated(sema, syntax, &node?)
}
fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {