mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-28 14:03:35 +00:00)
Auto merge of #12580 - Veykril:completion, r=Veykril
internal: Move more things out of `CompletionContext::function_def` into more specific parts
This commit is contained in: commit 817f46bc6c
9 changed files with 226 additions and 112 deletions
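The gist of the change, as a rough sketch (names are taken from the diff below; unrelated fields are elided): cursor-position state that every completion handler used to read from the shared CompletionContext, namely the enclosing function, the enclosing impl, and the incomplete-let flag, is now computed during context analysis and carried on the position-specific context types, so only the completions that actually need it receive it.

    // Rough sketch only, not the full definitions.
    // Before: loosely scoped state on the shared completion context.
    pub(crate) struct CompletionContext<'a> {
        // ...
        pub(super) function_def: Option<ast::Fn>, // removed by this commit
        pub(super) impl_def: Option<ast::Impl>,   // removed by this commit
        pub(super) incomplete_let: bool,          // removed by this commit
    }

    // After: the same information lives on the specific position kinds.
    pub(super) enum PathKind {
        Expr {
            self_param: Option<hir::SelfParam>,
            innermost_ret_ty: Option<hir::Type>,
            incomplete_let: bool,
            impl_: Option<ast::Impl>,
            // ...
        },
        // ItemListKind::TraitImpl now carries the Option<ast::Impl> it refers to.
        Item { kind: ItemListKind },
        // ...
    }

    pub(super) struct PatternContext {
        pub(super) impl_: Option<ast::Impl>,
        // ...
    }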
@@ -24,6 +24,7 @@ use std::iter;
 
 use hir::{db::HirDatabase, known, ScopeDef};
 use ide_db::SymbolKind;
+use syntax::ast;
 
 use crate::{
     context::Visible,
@@ -110,12 +111,18 @@ impl Completions {
         ["self", "super", "crate"].into_iter().for_each(|kw| self.add_keyword(ctx, kw));
     }
 
-    pub(crate) fn add_keyword_snippet(&mut self, ctx: &CompletionContext, kw: &str, snippet: &str) {
+    pub(crate) fn add_keyword_snippet_expr(
+        &mut self,
+        ctx: &CompletionContext,
+        kw: &str,
+        snippet: &str,
+        incomplete_let: bool,
+    ) {
         let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw);
 
         match ctx.config.snippet_cap {
             Some(cap) => {
-                if snippet.ends_with('}') && ctx.incomplete_let {
+                if snippet.ends_with('}') && incomplete_let {
                     // complete block expression snippets with a trailing semicolon, if inside an incomplete let
                     cov_mark::hit!(let_semi);
                     item.insert_snippet(cap, format!("{};", snippet));
@@ -130,6 +137,16 @@ impl Completions {
         item.add_to(self);
     }
 
+    pub(crate) fn add_keyword_snippet(&mut self, ctx: &CompletionContext, kw: &str, snippet: &str) {
+        let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw);
+
+        match ctx.config.snippet_cap {
+            Some(cap) => item.insert_snippet(cap, snippet),
+            None => item.insert_text(if snippet.contains('$') { kw } else { snippet }),
+        };
+        item.add_to(self);
+    }
+
     pub(crate) fn add_crate_roots(&mut self, ctx: &CompletionContext) {
         ctx.process_all_names(&mut |name, res| match res {
             ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) if m.is_crate_root(ctx.db) => {
@@ -393,11 +410,12 @@ fn enum_variants_with_paths(
     acc: &mut Completions,
     ctx: &CompletionContext,
     enum_: hir::Enum,
+    impl_: &Option<ast::Impl>,
     cb: impl Fn(&mut Completions, &CompletionContext, hir::Variant, hir::ModPath),
 ) {
     let variants = enum_.variants(ctx.db);
 
-    if let Some(impl_) = ctx.impl_def.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) {
+    if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) {
         if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) {
             for &variant in &variants {
                 let self_path = hir::ModPath::from_segments(

@@ -46,27 +46,26 @@ pub(crate) fn complete_undotted_self(
     if !ctx.config.enable_self_on_the_fly {
         return;
     }
-    match path_ctx {
-        PathCompletionCtx { qualified: Qualified::No, kind: PathKind::Expr { .. }, .. }
-            if path_ctx.is_trivial_path() && ctx.qualifier_ctx.none() => {}
+    let self_param = match path_ctx {
+        PathCompletionCtx {
+            qualified: Qualified::No,
+            kind: PathKind::Expr { self_param: Some(self_param), .. },
+            ..
+        } if path_ctx.is_trivial_path() && ctx.qualifier_ctx.none() => self_param,
         _ => return,
-    }
+    };
 
-    if let Some(func) = ctx.function_def.as_ref().and_then(|fn_| ctx.sema.to_def(fn_)) {
-        if let Some(self_) = func.self_param(ctx.db) {
-            let ty = self_.ty(ctx.db);
-            complete_fields(
-                acc,
-                ctx,
-                &ty,
-                |acc, field, ty| acc.add_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
-                |acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
-            );
-            complete_methods(ctx, &ty, |func| {
-                acc.add_method(ctx, func, Some(hir::known::SELF_PARAM), None)
-            });
-        }
-    }
+    let ty = self_param.ty(ctx.db);
+    complete_fields(
+        acc,
+        ctx,
+        &ty,
+        |acc, field, ty| acc.add_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
+        |acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
+    );
+    complete_methods(ctx, &ty, |func| {
+        acc.add_method(ctx, func, Some(hir::known::SELF_PARAM), None)
+    });
 }
 
 fn complete_fields(

@@ -14,7 +14,9 @@ pub(crate) fn complete_expr_path(
     path_ctx: &PathCompletionCtx,
 ) {
     let _p = profile::span("complete_expr_path");
-
+    if !ctx.qualifier_ctx.none() {
+        return;
+    }
     let (
         qualified,
         in_block_expr,
@@ -23,6 +25,9 @@ pub(crate) fn complete_expr_path(
         after_if_expr,
         wants_mut_token,
         in_condition,
+        ty,
+        incomplete_let,
+        impl_,
     ) = match path_ctx {
         &PathCompletionCtx {
             kind:
@@ -31,12 +36,16 @@ pub(crate) fn complete_expr_path(
                     in_loop_body,
                     after_if_expr,
                     in_condition,
+                    incomplete_let,
                     ref ref_expr_parent,
                     ref is_func_update,
+                    ref innermost_ret_ty,
+                    ref impl_,
+                    ..
                 },
             ref qualified,
             ..
-        } if ctx.qualifier_ctx.none() => (
+        } => (
            qualified,
            in_block_expr,
            in_loop_body,
@@ -44,6 +53,9 @@ pub(crate) fn complete_expr_path(
            after_if_expr,
            ref_expr_parent.as_ref().map(|it| it.mut_token().is_none()).unwrap_or(false),
            in_condition,
+           innermost_ret_ty,
+           incomplete_let,
+           impl_,
        ),
        _ => return,
    };
@@ -172,8 +184,7 @@ pub(crate) fn complete_expr_path(
                 if let Some(adt) =
                     ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
                 {
-                    let self_ty =
-                        (|| ctx.sema.to_def(ctx.impl_def.as_ref()?)?.self_ty(ctx.db).as_adt())();
+                    let self_ty = (|| ctx.sema.to_def(impl_.as_ref()?)?.self_ty(ctx.db).as_adt())();
                     let complete_self = self_ty == Some(adt);
 
                     match adt {
@@ -201,9 +212,15 @@ pub(crate) fn complete_expr_path(
                         }
                     }
                     hir::Adt::Enum(e) => {
-                        super::enum_variants_with_paths(acc, ctx, e, |acc, ctx, variant, path| {
-                            acc.add_qualified_enum_variant(ctx, variant, path)
-                        });
+                        super::enum_variants_with_paths(
+                            acc,
+                            ctx,
+                            e,
+                            impl_,
+                            |acc, ctx, variant, path| {
+                                acc.add_qualified_enum_variant(ctx, variant, path)
+                            },
+                        );
                     }
                 }
             }
@@ -214,7 +231,8 @@ pub(crate) fn complete_expr_path(
            });
 
            if !is_func_update {
-               let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
+               let mut add_keyword =
+                   |kw, snippet| acc.add_keyword_snippet_expr(ctx, kw, snippet, incomplete_let);
 
                if !in_block_expr {
                    add_keyword("unsafe", "unsafe {\n $0\n}");
@@ -252,10 +270,10 @@ pub(crate) fn complete_expr_path(
                }
            }
 
-           if let Some(fn_def) = &ctx.function_def {
+           if let Some(ty) = ty {
                add_keyword(
                    "return",
-                   match (in_block_expr, fn_def.ret_type().is_some()) {
+                   match (in_block_expr, ty.is_unit()) {
                        (true, true) => "return ;",
                        (true, false) => "return;",
                        (false, true) => "return $0",

@@ -24,8 +24,8 @@ pub(crate) fn complete_fn_param(
     ctx: &CompletionContext,
     pattern_ctx: &PatternContext,
 ) -> Option<()> {
-    let (param_list, _, param_kind) = match pattern_ctx {
-        PatternContext { param_ctx: Some(kind), .. } => kind,
+    let ((param_list, _, param_kind), impl_) = match pattern_ctx {
+        PatternContext { param_ctx: Some(kind), impl_, .. } => (kind, impl_),
         _ => return None,
     };
 
@@ -45,7 +45,7 @@ pub(crate) fn complete_fn_param(
 
     match param_kind {
         ParamKind::Function(function) => {
-            fill_fn_params(ctx, function, param_list, add_new_item_to_acc);
+            fill_fn_params(ctx, function, param_list, impl_, add_new_item_to_acc);
         }
         ParamKind::Closure(closure) => {
             let stmt_list = closure.syntax().ancestors().find_map(ast::StmtList::cast)?;
@@ -62,6 +62,7 @@ fn fill_fn_params(
     ctx: &CompletionContext,
     function: &ast::Fn,
     param_list: &ast::ParamList,
+    impl_: &Option<ast::Impl>,
     mut add_new_item_to_acc: impl FnMut(&str),
 ) {
     let mut file_params = FxHashMap::default();
@@ -104,7 +105,7 @@ fn fill_fn_params(
     }
     remove_duplicated(&mut file_params, param_list.params());
     let self_completion_items = ["self", "&self", "mut self", "&mut self"];
-    if should_add_self_completions(ctx, param_list) {
+    if should_add_self_completions(param_list, impl_) {
         self_completion_items.into_iter().for_each(|self_item| add_new_item_to_acc(self_item));
     }
 
@@ -155,11 +156,10 @@ fn remove_duplicated(
     })
 }
 
-fn should_add_self_completions(ctx: &CompletionContext, param_list: &ast::ParamList) -> bool {
-    let inside_impl = ctx.impl_def.is_some();
+fn should_add_self_completions(param_list: &ast::ParamList, impl_: &Option<ast::Impl>) -> bool {
     let no_params = param_list.params().next().is_none() && param_list.self_param().is_none();
 
-    inside_impl && no_params
+    impl_.is_some() && no_params
 }
 
 fn comma_wrapper(ctx: &CompletionContext) -> Option<(impl Fn(&str) -> String, TextRange)> {

@@ -66,7 +66,7 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext, kind: Option<&It
     let in_assoc_non_trait_impl = matches!(kind, Some(ItemListKind::Impl | ItemListKind::Trait));
     let in_extern_block = matches!(kind, Some(ItemListKind::ExternBlock));
     let in_trait = matches!(kind, Some(ItemListKind::Trait));
-    let in_trait_impl = matches!(kind, Some(ItemListKind::TraitImpl));
+    let in_trait_impl = matches!(kind, Some(ItemListKind::TraitImpl(_)));
     let in_inherent_impl = matches!(kind, Some(ItemListKind::Impl));
     let no_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
     let in_block = matches!(kind, None);

@@ -81,7 +81,7 @@ pub(crate) fn complete_trait_impl_name(
         kind,
         replacement_range(ctx, &item),
         // item -> ASSOC_ITEM_LIST -> IMPL
-        ast::Impl::cast(item.parent()?.parent()?)?,
+        &ast::Impl::cast(item.parent()?.parent()?)?,
     );
     Some(())
 }
@@ -97,7 +97,7 @@ pub(crate) fn complete_trait_impl_name_ref(
             kind:
                 NameRefKind::Path(
                     path_ctx @ PathCompletionCtx {
-                        kind: PathKind::Item { kind: ItemListKind::TraitImpl },
+                        kind: PathKind::Item { kind: ItemListKind::TraitImpl(Some(impl_)) },
                         ..
                     },
                 ),
@@ -109,7 +109,7 @@ pub(crate) fn complete_trait_impl_name_ref(
                 Some(name) => name.syntax().text_range(),
                 None => ctx.source_range(),
             },
-            ctx.impl_def.clone()?,
+            impl_,
         ),
         _ => (),
     }
@@ -121,10 +121,10 @@ fn complete_trait_impl(
     ctx: &CompletionContext,
     kind: ImplCompletionKind,
     replacement_range: TextRange,
-    impl_def: ast::Impl,
+    impl_def: &ast::Impl,
 ) {
-    if let Some(hir_impl) = ctx.sema.to_def(&impl_def) {
-        get_missing_assoc_items(&ctx.sema, &impl_def).into_iter().for_each(|item| {
+    if let Some(hir_impl) = ctx.sema.to_def(impl_def) {
+        get_missing_assoc_items(&ctx.sema, impl_def).into_iter().for_each(|item| {
             use self::ImplCompletionKind::*;
             match (item, kind) {
                 (hir::AssocItem::Function(func), All | Fn) => {

@@ -51,9 +51,15 @@ pub(crate) fn complete_pattern(
         ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
     {
         if refutable || single_variant_enum(e) {
-            super::enum_variants_with_paths(acc, ctx, e, |acc, ctx, variant, path| {
-                acc.add_qualified_variant_pat(ctx, variant, path);
-            });
+            super::enum_variants_with_paths(
+                acc,
+                ctx,
+                e,
+                &patctx.impl_,
+                |acc, ctx, variant, path| {
+                    acc.add_qualified_variant_pat(ctx, variant, path);
+                },
+            );
         }
     }
 

@@ -93,8 +93,12 @@ pub(super) enum PathKind {
         after_if_expr: bool,
         /// Whether this expression is the direct condition of an if or while expression
         in_condition: bool,
+        incomplete_let: bool,
         ref_expr_parent: Option<ast::RefExpr>,
         is_func_update: Option<ast::RecordExpr>,
+        self_param: Option<hir::SelfParam>,
+        innermost_ret_ty: Option<hir::Type>,
+        impl_: Option<ast::Impl>,
     },
     Type {
         location: TypeLocation,
@@ -140,12 +144,12 @@ pub(crate) enum TypeAscriptionTarget {
 }
 
 /// The kind of item list a [`PathKind::Item`] belongs to.
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[derive(Debug, PartialEq, Eq)]
 pub(super) enum ItemListKind {
     SourceFile,
     Module,
     Impl,
-    TraitImpl,
+    TraitImpl(Option<ast::Impl>),
     Trait,
     ExternBlock,
 }
@@ -176,6 +180,7 @@ pub(super) struct PatternContext {
     pub(super) mut_token: Option<SyntaxToken>,
     /// The record pattern this name or ref is a field of
     pub(super) record_pat: Option<ast::RecordPat>,
+    pub(super) impl_: Option<ast::Impl>,
 }
 
 /// The state of the lifetime we are completing.
@@ -317,16 +322,6 @@ pub(crate) struct CompletionContext<'a> {
     /// The expected type of what we are completing.
     pub(super) expected_type: Option<Type>,
 
-    /// The parent function of the cursor position if it exists.
-    // FIXME: This probably doesn't belong here
-    pub(super) function_def: Option<ast::Fn>,
-    /// The parent impl of the cursor position if it exists.
-    // FIXME: This probably doesn't belong here
-    pub(super) impl_def: Option<ast::Impl>,
-    /// Are we completing inside a let statement with a missing semicolon?
-    // FIXME: This should be part of PathKind::Expr
-    pub(super) incomplete_let: bool,
-
     // FIXME: This shouldn't exist
     pub(super) previous_token: Option<SyntaxToken>,
 
@@ -500,9 +495,6 @@ impl<'a> CompletionContext<'a> {
             module,
             expected_name: None,
             expected_type: None,
-            function_def: None,
-            impl_def: None,
-            incomplete_let: false,
             previous_token: None,
             // dummy value, will be overwritten
             ident_ctx: IdentContext::UnexpandedAttrTT { fake_attribute_under_caret: None },

@@ -330,11 +330,6 @@ impl<'a> CompletionContext<'a> {
         self.previous_token =
             syntax_element.clone().into_token().and_then(previous_non_trivia_token);
 
-        self.incomplete_let =
-            syntax_element.ancestors().take(6).find_map(ast::LetStmt::cast).map_or(false, |it| {
-                it.syntax().text_range().end() == syntax_element.text_range().end()
-            });
-
         (self.expected_type, self.expected_name) = self.expected_type_and_name();
 
         // Overwrite the path kind for derives
@@ -394,28 +389,6 @@ impl<'a> CompletionContext<'a> {
                 return Some(());
             }
         };
-        self.impl_def = self
-            .sema
-            .token_ancestors_with_macros(self.token.clone())
-            .take_while(|it| it.kind() != SyntaxKind::SOURCE_FILE)
-            .filter_map(ast::Item::cast)
-            .take(2)
-            .find_map(|it| match it {
-                ast::Item::Impl(impl_) => Some(impl_),
-                _ => None,
-            });
-        self.function_def = self
-            .sema
-            .token_ancestors_with_macros(self.token.clone())
-            .take_while(|it| {
-                it.kind() != SyntaxKind::SOURCE_FILE && it.kind() != SyntaxKind::MODULE
-            })
-            .filter_map(ast::Item::cast)
-            .take(2)
-            .find_map(|it| match it {
-                ast::Item::Fn(fn_) => Some(fn_),
-                _ => None,
-            });
 
         match name_like {
             ast::NameLike::Lifetime(lifetime) => {
@@ -469,7 +442,7 @@ impl<'a> CompletionContext<'a> {
     }
 
     fn classify_name(
-        _sema: &Semantics<RootDatabase>,
+        sema: &Semantics<RootDatabase>,
         original_file: &SyntaxNode,
         name: ast::Name,
     ) -> Option<NameContext> {
@@ -481,9 +454,9 @@ impl<'a> CompletionContext<'a> {
                 ast::Enum(_) => NameKind::Enum,
                 ast::Fn(_) => NameKind::Function,
                 ast::IdentPat(bind_pat) => {
-                    let mut pat_ctx = pattern_context_for(original_file, bind_pat.into());
+                    let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
                     if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
-                        pat_ctx.record_pat = find_node_in_file_compensated(original_file, &record_field.parent_record_pat());
+                        pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
                     }
 
                     NameKind::IdentPat(pat_ctx)
@@ -520,9 +493,13 @@ impl<'a> CompletionContext<'a> {
             |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
 
         if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
-            return find_node_in_file_compensated(original_file, &record_field.parent_record_lit())
-                .map(NameRefKind::RecordExpr)
-                .map(make_res);
+            return find_node_in_file_compensated(
+                sema,
+                original_file,
+                &record_field.parent_record_lit(),
+            )
+            .map(NameRefKind::RecordExpr)
+            .map(make_res);
         }
         if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
             let kind = NameRefKind::Pattern(PatternContext {
@@ -531,10 +508,12 @@ impl<'a> CompletionContext<'a> {
                 ref_token: None,
                 mut_token: None,
                 record_pat: find_node_in_file_compensated(
+                    sema,
                     original_file,
                     &record_field.parent_record_pat(),
                 ),
                 ..pattern_context_for(
+                    sema,
                     original_file,
                     record_field.parent_record_pat().clone().into(),
                 )
@@ -594,7 +573,7 @@ impl<'a> CompletionContext<'a> {
         };
         let func_update_record = |syn: &SyntaxNode| {
             if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
-                find_node_in_file_compensated(original_file, &record_expr)
+                find_node_in_file_compensated(sema, original_file, &record_expr)
             } else {
                 None
             }
@@ -696,9 +675,9 @@ impl<'a> CompletionContext<'a> {
                 ast::TypeBound(_) => TypeLocation::TypeBound,
                 // is this case needed?
                 ast::TypeBoundList(_) => TypeLocation::TypeBound,
-                ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
+                ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
                 // is this case needed?
-                ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(original_file, Some(it))),
+                ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
                 ast::TupleField(_) => TypeLocation::TupleField,
                 _ => return None,
             }
@@ -727,8 +706,63 @@ impl<'a> CompletionContext<'a> {
             let after_if_expr = after_if_expr(it.clone());
             let ref_expr_parent =
                 path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
+            let (innermost_ret_ty, self_param) = {
+                let find_ret_ty = |it: SyntaxNode| {
+                    if let Some(item) = ast::Item::cast(it.clone()) {
+                        match item {
+                            ast::Item::Fn(f) => {
+                                Some(sema.to_def(&f).map(|it| it.ret_type(sema.db)))
+                            }
+                            ast::Item::MacroCall(_) => None,
+                            _ => Some(None),
+                        }
+                    } else {
+                        let expr = ast::Expr::cast(it)?;
+                        let callable = match expr {
+                            // FIXME
+                            // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
+                            ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
+                            _ => return None,
+                        };
+                        Some(
+                            callable
+                                .and_then(|c| c.adjusted().as_callable(sema.db))
+                                .map(|it| it.return_type()),
+                        )
+                    }
+                };
+                let find_fn_self_param = |it| match it {
+                    ast::Item::Fn(fn_) => {
+                        Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db)))
+                    }
+                    ast::Item::MacroCall(_) => None,
+                    _ => Some(None),
+                };
+
+                match find_node_in_file_compensated(sema, original_file, &expr) {
+                    Some(it) => {
+                        let innermost_ret_ty = sema
+                            .ancestors_with_macros(it.syntax().clone())
+                            .find_map(find_ret_ty)
+                            .flatten();
+
+                        let self_param = sema
+                            .ancestors_with_macros(it.syntax().clone())
+                            .filter_map(ast::Item::cast)
+                            .find_map(find_fn_self_param)
+                            .flatten();
+                        (innermost_ret_ty, self_param)
+                    }
+                    None => (None, None),
+                }
+            };
             let is_func_update = func_update_record(it);
             let in_condition = is_in_condition(&expr);
+            let incomplete_let = it
+                .parent()
+                .and_then(ast::LetStmt::cast)
+                .map_or(false, |it| it.semicolon_token().is_none());
+            let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
 
             PathKind::Expr {
                 in_block_expr,
@@ -737,6 +771,10 @@ impl<'a> CompletionContext<'a> {
                 in_condition,
                 ref_expr_parent,
                 is_func_update,
+                innermost_ret_ty,
+                self_param,
+                incomplete_let,
+                impl_,
             }
         };
         let make_path_kind_type = |ty: ast::Type| {
@@ -764,14 +802,14 @@ impl<'a> CompletionContext<'a> {
                 },
                 ast::TupleStructPat(it) => {
                     path_ctx.has_call_parens = true;
-                    PathKind::Pat { pat_ctx: pattern_context_for(original_file, it.into())}
+                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
                 },
                 ast::RecordPat(it) => {
                     path_ctx.has_call_parens = true;
-                    PathKind::Pat { pat_ctx: pattern_context_for(original_file, it.into())}
+                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
                 },
                 ast::PathPat(it) => {
-                    PathKind::Pat { pat_ctx: pattern_context_for(original_file, it.into())}
+                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
                 },
                 ast::MacroCall(it) => {
                     // A macro call in this position is usually a result of parsing recovery, so check that
@@ -785,7 +823,7 @@ impl<'a> CompletionContext<'a> {
             match_ast! {
                 match parent {
                     ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
-                    ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(original_file, it.into())},
+                    ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
                     ast::MacroType(ty) => make_path_kind_type(ty.into()),
                     ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
                     ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
@@ -793,7 +831,7 @@ impl<'a> CompletionContext<'a> {
                         match it {
                             ast::Trait(_) => ItemListKind::Trait,
                             ast::Impl(it) => if it.trait_().is_some() {
-                                ItemListKind::TraitImpl
+                                ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
                             } else {
                                 ItemListKind::Impl
                             },
@@ -930,7 +968,11 @@ impl<'a> CompletionContext<'a> {
     }
 }
 
-fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternContext {
+fn pattern_context_for(
+    sema: &Semantics<RootDatabase>,
+    original_file: &SyntaxNode,
+    pat: ast::Pat,
+) -> PatternContext {
     let mut is_param = None;
     let (refutability, has_type_ascription) =
         pat
@@ -946,7 +988,7 @@ fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternCont
                 let has_type_ascription = param.ty().is_some();
                 is_param = (|| {
                     let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
-                    let param_list = find_node_in_file_compensated(original_file, &fake_param_list)?;
+                    let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
                     let param_list_owner = param_list.syntax().parent()?;
                     let kind = match_ast! {
                         match param_list_owner {
@@ -971,6 +1013,7 @@ fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternCont
         ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
         _ => (None, None),
     };
+
     PatternContext {
         refutability,
         param_ctx: is_param,
@@ -979,6 +1022,27 @@ fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternCont
         mut_token,
         ref_token,
         record_pat: None,
+        impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
+    }
+}
+
+fn fetch_immediate_impl(
+    sema: &Semantics<RootDatabase>,
+    original_file: &SyntaxNode,
+    node: &SyntaxNode,
+) -> Option<ast::Impl> {
+    let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
+        .filter_map(ast::Item::cast)
+        .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
+
+    match ancestors.next()? {
+        ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
+        ast::Item::Impl(it) => return Some(it),
+        _ => return None,
+    }
+    match ancestors.next()? {
+        ast::Item::Impl(it) => Some(it),
+        _ => None,
     }
 }
 
@@ -1000,9 +1064,21 @@ fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
 /// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
 /// for the offset introduced by the fake ident.
 /// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
-fn find_node_in_file_compensated<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
-    let syntax_range = syntax.text_range();
-    let range = node.syntax().text_range();
+fn find_node_in_file_compensated<N: AstNode>(
+    sema: &Semantics<RootDatabase>,
+    in_file: &SyntaxNode,
+    node: &N,
+) -> Option<N> {
+    ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
+}
+
+fn ancestors_in_file_compensated<'sema>(
+    sema: &'sema Semantics<RootDatabase>,
+    in_file: &SyntaxNode,
+    node: &SyntaxNode,
+) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
+    let syntax_range = in_file.text_range();
+    let range = node.text_range();
     let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
     if end < range.start() {
         return None;
@@ -1010,17 +1086,22 @@ fn find_node_in_file_compensated<N: AstNode>(syntax: &SyntaxNode, node: &N) -> O
     let range = TextRange::new(range.start(), end);
     // our inserted ident could cause `range` to go outside of the original syntax, so cap it
     let intersection = range.intersect(syntax_range)?;
-    syntax.covering_element(intersection).ancestors().find_map(N::cast)
+    let node = match in_file.covering_element(intersection) {
+        NodeOrToken::Node(node) => node,
+        NodeOrToken::Token(tok) => tok.parent()?,
+    };
+    Some(sema.ancestors_with_macros(node))
 }
 
 /// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
 /// for the offset introduced by the fake ident..
 /// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
 fn find_opt_node_in_file_compensated<N: AstNode>(
+    sema: &Semantics<RootDatabase>,
     syntax: &SyntaxNode,
     node: Option<N>,
 ) -> Option<N> {
-    find_node_in_file_compensated(syntax, &node?)
+    find_node_in_file_compensated(sema, syntax, &node?)
 }
 
 fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {