rust-analyzer/crates/ide-completion/src/context.rs


//! See `CompletionContext` structure.
use std::iter;
use base_db::SourceDatabaseExt;
use hir::{
HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo,
};
use ide_db::{
active_parameter::ActiveParameter,
base_db::{FilePosition, SourceDatabase},
famous_defs::FamousDefs,
FxHashMap, FxHashSet, RootDatabase,
};
use syntax::{
algo::{find_node_at_offset, non_trivia_sibling},
ast::{self, AttrKind, HasArgList, HasName, NameOrNameRef},
match_ast, AstNode, NodeOrToken,
SyntaxKind::{self, *},
SyntaxNode, SyntaxToken, TextRange, TextSize, T,
};
use text_edit::Indel;
use crate::{
patterns::{
determine_location, determine_prev_sibling, is_in_loop_body, is_in_token_of_for_loop,
previous_token, ImmediateLocation, ImmediatePrevSibling,
},
CompletionConfig,
};
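/// Identifier inserted at the cursor position so that the speculative file still
/// parses into a complete syntax tree (see `CompletionContext::new`).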
const COMPLETION_MARKER: &str = "intellijRulezz";
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum PatternRefutability {
Refutable,
Irrefutable,
}
pub(crate) enum Visible {
Yes,
Editable,
No,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(super) enum PathKind {
Expr {
in_block_expr: bool,
in_loop_body: bool,
},
Type,
Attr {
kind: AttrKind,
annotated_item_kind: Option<SyntaxKind>,
},
Derive,
/// Path in item position, that is, inside an (Assoc)ItemList
Item {
kind: ItemListKind,
},
Pat,
Vis {
has_in_token: bool,
},
Use,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(super) enum ItemListKind {
SourceFile,
Module,
Impl,
Trait,
ExternBlock,
}
#[derive(Debug)]
pub(crate) struct PathCompletionCtx {
/// Whether this is a call with the `()` already present (or `{}` in the case of record patterns)
pub(super) has_call_parens: bool,
/// Whether this path has a macro call bang `!`
pub(super) has_macro_bang: bool,
/// Whether this path starts with a `::`.
pub(super) is_absolute_path: bool,
/// The qualifier of the current path if it exists.
pub(super) qualifier: Option<PathQualifierCtx>,
#[allow(dead_code)]
// FIXME: use this
/// The parent of the path we are completing.
pub(super) parent: Option<ast::Path>,
pub(super) kind: PathKind,
/// Whether the path segment has type args or not.
pub(super) has_type_args: bool,
}
#[derive(Debug)]
pub(crate) struct PathQualifierCtx {
pub(crate) path: ast::Path,
pub(crate) resolution: Option<PathResolution>,
/// Whether this path consists solely of `super` segments
pub(crate) is_super_chain: bool,
/// Whether the qualifier comes from a use tree parent or not
pub(crate) use_tree_parent: bool,
/// Whether the qualifier is the infer type `<_>`, as in `<_>::$0`
pub(crate) is_infer_qualifier: bool,
}
#[derive(Debug)]
pub(super) struct PatternContext {
pub(super) refutability: PatternRefutability,
pub(super) param_ctx: Option<(ast::ParamList, ast::Param, ParamKind)>,
pub(super) has_type_ascription: bool,
pub(super) parent_pat: Option<ast::Pat>,
pub(super) ref_token: Option<SyntaxToken>,
pub(super) mut_token: Option<SyntaxToken>,
}
#[derive(Debug)]
pub(super) enum LifetimeContext {
LifetimeParam { is_decl: bool, param: ast::LifetimeParam },
Lifetime,
LabelRef,
LabelDef,
}
#[derive(Debug)]
#[allow(dead_code)]
pub(super) enum NameContext {
Const,
ConstParam,
Enum,
Function,
IdentPat,
MacroDef,
MacroRules,
/// Fake node
Module(ast::Module),
RecordField,
Rename,
SelfParam,
Static,
Struct,
Trait,
TypeAlias,
TypeParam,
Union,
Variant,
}
#[derive(Debug)]
pub(super) struct NameRefContext {
pub(super) dot_access: Option<DotAccess>,
pub(super) path_ctx: Option<PathCompletionCtx>,
}
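/// A field access or method call immediately after a `.`, e.g. `receiver.field$0`
/// or `receiver.method$0()`.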
#[derive(Debug)]
pub(super) enum DotAccess {
Field {
receiver: Option<ast::Expr>,
/// True if the receiver is an integer literal and no ident follows it yet in the original file,
/// e.g. `0.$0`
receiver_is_ambiguous_float_literal: bool,
},
Method {
receiver: Option<ast::Expr>,
has_parens: bool,
},
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum ParamKind {
Function(ast::Fn),
Closure(ast::ClosureExpr),
}
/// `CompletionContext` is created early during completion to figure out where exactly
/// the cursor is, syntax-wise.
#[derive(Debug)]
pub(crate) struct CompletionContext<'a> {
pub(super) sema: Semantics<'a, RootDatabase>,
pub(super) scope: SemanticsScope<'a>,
pub(super) db: &'a RootDatabase,
pub(super) config: &'a CompletionConfig,
pub(super) position: FilePosition,
/// The token before the cursor, in the original file.
pub(super) original_token: SyntaxToken,
/// The token before the cursor, in the macro-expanded file.
pub(super) token: SyntaxToken,
/// The crate of the current file.
pub(super) krate: hir::Crate,
/// The module of the `scope`.
pub(super) module: hir::Module,
/// The expected name of what we are completing.
/// This is usually the parameter name of the function argument we are completing.
pub(super) expected_name: Option<NameOrNameRef>,
/// The expected type of what we are completing.
pub(super) expected_type: Option<Type>,
/// The parent function of the cursor position if it exists.
pub(super) function_def: Option<ast::Fn>,
/// The parent impl of the cursor position if it exists.
pub(super) impl_def: Option<ast::Impl>,
/// The NameLike under the cursor in the original file if it exists.
pub(super) name_syntax: Option<ast::NameLike>,
/// Are we completing inside a let statement with a missing semicolon?
pub(super) incomplete_let: bool,
pub(super) completion_location: Option<ImmediateLocation>,
pub(super) prev_sibling: Option<ImmediatePrevSibling>,
pub(super) fake_attribute_under_caret: Option<ast::Attr>,
pub(super) previous_token: Option<SyntaxToken>,
pub(super) name_ctx: Option<NameContext>,
pub(super) lifetime_ctx: Option<LifetimeContext>,
pub(super) nameref_ctx: Option<NameRefContext>,
pub(super) pattern_ctx: Option<PatternContext>,
pub(super) existing_derives: FxHashSet<hir::Macro>,
pub(super) locals: FxHashMap<Name, Local>,
}
impl<'a> CompletionContext<'a> {
/// The range of the identifier that is being completed.
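/// For `foo.bar$0` this is the range of `bar`; if only a `'` has been typed for a
/// lifetime, a one-character range starting at the `'` is used instead.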
pub(crate) fn source_range(&self) -> TextRange {
// check kind of macro-expanded token, but use range of original token
let kind = self.token.kind();
match kind {
CHAR => {
// assume we are completing a lifetime but the user has only typed the '
cov_mark::hit!(completes_if_lifetime_without_idents);
TextRange::at(self.original_token.text_range().start(), TextSize::from(1))
}
IDENT | LIFETIME_IDENT | UNDERSCORE => self.original_token.text_range(),
_ if kind.is_keyword() => self.original_token.text_range(),
_ => TextRange::empty(self.position.offset),
}
}
pub(crate) fn name_ref(&self) -> Option<&ast::NameRef> {
self.name_syntax.as_ref().and_then(ast::NameLike::as_name_ref)
}
pub(crate) fn lifetime(&self) -> Option<&ast::Lifetime> {
self.name_syntax.as_ref().and_then(ast::NameLike::as_lifetime)
}
pub(crate) fn previous_token_is(&self, kind: SyntaxKind) -> bool {
self.previous_token.as_ref().map_or(false, |tok| tok.kind() == kind)
}
pub(crate) fn famous_defs(&self) -> FamousDefs {
FamousDefs(&self.sema, self.krate)
}
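/// The receiver of the dot access at the cursor, if any, e.g. `foo` in `foo.bar$0`.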
pub(crate) fn dot_receiver(&self) -> Option<&ast::Expr> {
match &self.nameref_ctx {
Some(NameRefContext {
dot_access:
Some(DotAccess::Method { receiver, .. } | DotAccess::Field { receiver, .. }),
..
}) => receiver.as_ref(),
_ => None,
}
}
pub(crate) fn has_dot_receiver(&self) -> bool {
self.dot_receiver().is_some()
}
pub(crate) fn expects_assoc_item(&self) -> bool {
matches!(self.completion_location, Some(ImmediateLocation::Trait | ImmediateLocation::Impl))
}
pub(crate) fn expects_variant(&self) -> bool {
matches!(self.name_ctx, Some(NameContext::Variant))
}
pub(crate) fn expects_non_trait_assoc_item(&self) -> bool {
matches!(self.completion_location, Some(ImmediateLocation::Impl))
}
pub(crate) fn expects_item(&self) -> bool {
matches!(self.completion_location, Some(ImmediateLocation::ItemList))
}
pub(crate) fn expects_generic_arg(&self) -> bool {
matches!(self.completion_location, Some(ImmediateLocation::GenericArgList(_)))
}
pub(crate) fn has_block_expr_parent(&self) -> bool {
matches!(self.completion_location, Some(ImmediateLocation::StmtList))
}
pub(crate) fn expects_ident_ref_expr(&self) -> bool {
matches!(self.completion_location, Some(ImmediateLocation::RefExpr))
}
pub(crate) fn expect_field(&self) -> bool {
matches!(self.completion_location, Some(ImmediateLocation::TupleField))
|| matches!(self.name_ctx, Some(NameContext::RecordField))
}
/// Whether the cursor is right after a trait or impl header.
/// e.g. `trait Foo ident$0`
// FIXME: This probably shouldn't exist
pub(crate) fn has_unfinished_impl_or_trait_prev_sibling(&self) -> bool {
matches!(
self.prev_sibling,
Some(ImmediatePrevSibling::ImplDefType | ImmediatePrevSibling::TraitDefName)
)
}
// FIXME: This probably shouldn't exist
pub(crate) fn has_impl_prev_sibling(&self) -> bool {
matches!(self.prev_sibling, Some(ImmediatePrevSibling::ImplDefType))
}
pub(crate) fn has_visibility_prev_sibling(&self) -> bool {
matches!(self.prev_sibling, Some(ImmediatePrevSibling::Visibility))
}
pub(crate) fn after_if(&self) -> bool {
matches!(self.prev_sibling, Some(ImmediatePrevSibling::IfExpr))
}
// FIXME: This shouldn't exist
pub(crate) fn is_path_disallowed(&self) -> bool {
self.previous_token_is(T![unsafe])
|| matches!(
self.prev_sibling,
Some(ImmediatePrevSibling::Attribute | ImmediatePrevSibling::Visibility)
)
|| matches!(
self.completion_location,
Some(ImmediateLocation::RecordPat(_) | ImmediateLocation::RecordExpr(_))
)
|| matches!(self.name_ctx, Some(NameContext::Module(_) | NameContext::Rename))
}
pub(crate) fn path_context(&self) -> Option<&PathCompletionCtx> {
self.nameref_ctx.as_ref().and_then(|ctx| ctx.path_ctx.as_ref())
}
pub(crate) fn expects_expression(&self) -> bool {
matches!(self.path_context(), Some(PathCompletionCtx { kind: PathKind::Expr { .. }, .. }))
}
pub(crate) fn expects_type(&self) -> bool {
matches!(self.path_context(), Some(PathCompletionCtx { kind: PathKind::Type, .. }))
}
pub(crate) fn path_is_call(&self) -> bool {
self.path_context().map_or(false, |it| it.has_call_parens)
}
pub(crate) fn is_non_trivial_path(&self) -> bool {
matches!(
self.path_context(),
Some(
PathCompletionCtx { is_absolute_path: true, .. }
| PathCompletionCtx { qualifier: Some(_), .. }
)
)
}
pub(crate) fn path_qual(&self) -> Option<&ast::Path> {
self.path_context().and_then(|it| it.qualifier.as_ref().map(|it| &it.path))
}
pub(crate) fn path_kind(&self) -> Option<PathKind> {
self.path_context().map(|it| it.kind)
}
pub(crate) fn is_immediately_after_macro_bang(&self) -> bool {
self.token.kind() == BANG && self.token.parent().map_or(false, |it| it.kind() == MACRO_CALL)
}
/// Checks if an item is visible and not `doc(hidden)` at the completion site.
pub(crate) fn is_visible<I>(&self, item: &I) -> Visible
where
I: hir::HasVisibility + hir::HasAttrs + hir::HasCrate + Copy,
{
self.is_visible_impl(&item.visibility(self.db), &item.attrs(self.db), item.krate(self.db))
}
pub(crate) fn is_scope_def_hidden(&self, scope_def: ScopeDef) -> bool {
if let (Some(attrs), Some(krate)) = (scope_def.attrs(self.db), scope_def.krate(self.db)) {
return self.is_doc_hidden(&attrs, krate);
}
false
}
/// Check if an item is `#[doc(hidden)]`.
pub(crate) fn is_item_hidden(&self, item: &hir::ItemInNs) -> bool {
let attrs = item.attrs(self.db);
let krate = item.krate(self.db);
match (attrs, krate) {
(Some(attrs), Some(krate)) => self.is_doc_hidden(&attrs, krate),
_ => false,
}
}
/// Whether the given trait is an operator trait or not.
pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool {
match trait_.attrs(self.db).lang() {
Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()),
None => false,
}
}
/// Returns the traits in scope, with the [`Drop`] trait removed.
pub(crate) fn traits_in_scope(&self) -> hir::VisibleTraits {
let mut traits_in_scope = self.scope.visible_traits();
if let Some(drop) = self.famous_defs().core_ops_Drop() {
traits_in_scope.0.remove(&drop.into());
}
traits_in_scope
}
/// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items.
pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
let _p = profile::span("CompletionContext::process_all_names");
self.scope.process_all_names(&mut |name, def| {
if self.is_scope_def_hidden(def) {
return;
}
f(name, def);
});
}
pub(crate) fn process_all_names_raw(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
let _p = profile::span("CompletionContext::process_all_names_raw");
self.scope.process_all_names(&mut |name, def| f(name, def));
}
fn is_visible_impl(
&self,
vis: &hir::Visibility,
attrs: &hir::Attrs,
defining_crate: hir::Crate,
) -> Visible {
if !vis.is_visible_from(self.db, self.module.into()) {
if !self.config.enable_private_editable {
return Visible::No;
}
// If the definition location is editable, also show private items
let root_file = defining_crate.root_file(self.db);
let source_root_id = self.db.file_source_root(root_file);
let is_editable = !self.db.source_root(source_root_id).is_library;
return if is_editable { Visible::Editable } else { Visible::No };
}
if self.is_doc_hidden(attrs, defining_crate) {
Visible::No
} else {
Visible::Yes
}
}
fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool {
// `doc(hidden)` items are only completed within the defining crate.
self.krate != defining_crate && attrs.has_doc_hidden()
}
}
// CompletionContext construction
impl<'a> CompletionContext<'a> {
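/// Builds the context for the given position, e.g. `CompletionContext::new(&db, position, &config)`
/// as used in the tests below; returns `None` if no suitable token exists at the cursor.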
pub(super) fn new(
db: &'a RootDatabase,
position @ FilePosition { file_id, offset }: FilePosition,
config: &'a CompletionConfig,
) -> Option<CompletionContext<'a>> {
let _p = profile::span("CompletionContext::new");
let sema = Semantics::new(db);
let original_file = sema.parse(file_id);
// Insert a fake ident to get a valid parse tree. We will use this file
// to determine context, though the original_file will be used for
// actual completion.
let file_with_fake_ident = {
let parse = db.parse(file_id);
let edit = Indel::insert(offset, COMPLETION_MARKER.to_string());
parse.reparse(&edit).tree()
};
let fake_ident_token =
file_with_fake_ident.syntax().token_at_offset(offset).right_biased()?;
let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
let token = sema.descend_into_macros_single(original_token.clone());
let scope = sema.scope_at_offset(&token.parent()?, offset)?;
let krate = scope.krate();
let module = scope.module();
let mut locals = FxHashMap::default();
scope.process_all_names(&mut |name, scope| {
if let ScopeDef::Local(local) = scope {
locals.insert(name, local);
}
});
let mut ctx = CompletionContext {
sema,
scope,
db,
config,
position,
original_token,
token,
krate,
module,
expected_name: None,
expected_type: None,
function_def: None,
impl_def: None,
name_syntax: None,
incomplete_let: false,
completion_location: None,
prev_sibling: None,
fake_attribute_under_caret: None,
previous_token: None,
name_ctx: None,
lifetime_ctx: None,
nameref_ctx: None,
pattern_ctx: None,
existing_derives: Default::default(),
locals,
};
ctx.expand_and_fill(
original_file.syntax().clone(),
file_with_fake_ident.syntax().clone(),
offset,
fake_ident_token,
);
Some(ctx)
}
/// Expand attributes and macro calls at the current cursor position for both the original file
/// and fake file repeatedly. As soon as one of the two expansions fails we stop so the original
/// and speculative states stay in sync.
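/// Each iteration first tries to expand attribute macros on the item ancestors, then a
/// `#[derive]` as a pseudo attribute macro, and finally fn-like macro calls around the cursor.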
fn expand_and_fill(
&mut self,
mut original_file: SyntaxNode,
mut speculative_file: SyntaxNode,
mut offset: TextSize,
mut fake_ident_token: SyntaxToken,
) {
let _p = profile::span("CompletionContext::expand_and_fill");
let mut derive_ctx = None;
'expansion: loop {
let parent_item =
|item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
let ancestor_items = iter::successors(
Option::zip(
find_node_at_offset::<ast::Item>(&original_file, offset),
find_node_at_offset::<ast::Item>(&speculative_file, offset),
),
|(a, b)| parent_item(a).zip(parent_item(b)),
);
// first try to expand attributes as these are always the outermost macro calls
'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
match (
self.sema.expand_attr_macro(&actual_item),
self.sema.speculative_expand_attr_macro(
&actual_item,
&item_with_fake_ident,
fake_ident_token.clone(),
),
) {
// maybe parent items have attributes, so continue walking the ancestors
(None, None) => continue 'ancestors,
// successful expansions
(Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
let new_offset = fake_mapped_token.text_range().start();
if new_offset > actual_expansion.text_range().end() {
// offset outside of bounds from the original expansion,
// stop here to prevent problems from happening
break 'expansion;
}
original_file = actual_expansion;
speculative_file = fake_expansion;
fake_ident_token = fake_mapped_token;
offset = new_offset;
continue 'expansion;
}
// exactly one expansion failed, inconsistent state so stop expanding completely
_ => break 'expansion,
}
}
// No attributes have been expanded, so look for macro_call! token trees or derive token trees
let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
Some(it) => it,
None => break 'expansion,
};
let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
Some(it) => it,
None => break 'expansion,
};
// Expand pseudo-derive expansion
if let (Some(orig_attr), Some(spec_attr)) = (
orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
) {
if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
self.sema.speculative_expand_derive_as_pseudo_attr_macro(
&orig_attr,
&spec_attr,
fake_ident_token.clone(),
),
) {
derive_ctx = Some((
actual_expansion,
fake_expansion,
fake_mapped_token.text_range().start(),
orig_attr,
));
}
// at this point we won't have any more successful expansions, so stop
break 'expansion;
}
// Expand fn-like macro calls
if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
) {
let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
let mac_call_path1 =
macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
// inconsistent state, stop expanding
if mac_call_path0 != mac_call_path1 {
break 'expansion;
}
let speculative_args = match macro_call_with_fake_ident.token_tree() {
Some(tt) => tt,
None => break 'expansion,
};
match (
self.sema.expand(&actual_macro_call),
self.sema.speculative_expand(
&actual_macro_call,
&speculative_args,
fake_ident_token.clone(),
),
) {
// successful expansions
(Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
let new_offset = fake_mapped_token.text_range().start();
if new_offset > actual_expansion.text_range().end() {
// offset outside of bounds from the original expansion,
// stop here to prevent problems from happening
break 'expansion;
}
original_file = actual_expansion;
speculative_file = fake_expansion;
fake_ident_token = fake_mapped_token;
offset = new_offset;
continue 'expansion;
}
// at least one expansion failed, we won't have anything to expand from this point
// onwards so break out
_ => break 'expansion,
}
}
// none of our states have changed so stop the loop
break 'expansion;
}
self.fill(&original_file, speculative_file, offset, derive_ctx);
}
/// Calculate the expected type and name of the cursor position.
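/// For example, in `bar(c$0)` with `fn bar(x: u32)` the expected type is `u32` and the
/// expected name is `x` (see the `expected_type_*` tests below).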
fn expected_type_and_name(&self) -> (Option<Type>, Option<NameOrNameRef>) {
let mut node = match self.token.parent() {
Some(it) => it,
None => return (None, None),
};
loop {
break match_ast! {
match node {
ast::LetStmt(it) => {
cov_mark::hit!(expected_type_let_with_leading_char);
cov_mark::hit!(expected_type_let_without_leading_char);
let ty = it.pat()
.and_then(|pat| self.sema.type_of_pat(&pat))
.or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it)))
.map(TypeInfo::original);
let name = match it.pat() {
Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
Some(_) | None => None,
};
(ty, name)
},
ast::LetExpr(it) => {
cov_mark::hit!(expected_type_if_let_without_leading_char);
let ty = it.pat()
.and_then(|pat| self.sema.type_of_pat(&pat))
.or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it)))
.map(TypeInfo::original);
(ty, None)
},
ast::ArgList(_) => {
cov_mark::hit!(expected_type_fn_param);
ActiveParameter::at_token(
&self.sema,
self.token.clone(),
).map(|ap| {
let name = ap.ident().map(NameOrNameRef::Name);
let ty = if has_ref(&self.token) {
cov_mark::hit!(expected_type_fn_param_ref);
ap.ty.remove_ref()
} else {
Some(ap.ty)
};
(ty, name)
})
.unwrap_or((None, None))
},
ast::RecordExprFieldList(it) => {
// wouldn't try {} be nice...
(|| {
if self.token.kind() == T![..]
|| self.token.prev_token().map(|t| t.kind()) == Some(T![..])
{
cov_mark::hit!(expected_type_struct_func_update);
let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
let ty = self.sema.type_of_expr(&record_expr.into())?;
Some((
Some(ty.original),
None
))
} else {
cov_mark::hit!(expected_type_struct_field_without_leading_char);
let expr_field = self.token.prev_sibling_or_token()?
.into_node()
.and_then(ast::RecordExprField::cast)?;
let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?;
Some((
Some(ty),
expr_field.field_name().map(NameOrNameRef::NameRef),
))
}
})().unwrap_or((None, None))
},
ast::RecordExprField(it) => {
if let Some(expr) = it.expr() {
cov_mark::hit!(expected_type_struct_field_with_leading_char);
(
self.sema.type_of_expr(&expr).map(TypeInfo::original),
it.field_name().map(NameOrNameRef::NameRef),
)
} else {
cov_mark::hit!(expected_type_struct_field_followed_by_comma);
let ty = self.sema.resolve_record_field(&it)
.map(|(_, _, ty)| ty);
(
ty,
it.field_name().map(NameOrNameRef::NameRef),
)
}
},
ast::MatchExpr(it) => {
cov_mark::hit!(expected_type_match_arm_without_leading_char);
let ty = it.expr().and_then(|e| self.sema.type_of_expr(&e)).map(TypeInfo::original);
(ty, None)
},
ast::IfExpr(it) => {
let ty = it.condition()
.and_then(|e| self.sema.type_of_expr(&e))
.map(TypeInfo::original);
(ty, None)
},
ast::IdentPat(it) => {
cov_mark::hit!(expected_type_if_let_with_leading_char);
cov_mark::hit!(expected_type_match_arm_with_leading_char);
let ty = self.sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
(ty, None)
},
ast::Fn(it) => {
cov_mark::hit!(expected_type_fn_ret_with_leading_char);
cov_mark::hit!(expected_type_fn_ret_without_leading_char);
let def = self.sema.to_def(&it);
(def.map(|def| def.ret_type(self.db)), None)
},
ast::ClosureExpr(it) => {
let ty = self.sema.type_of_expr(&it.into());
ty.and_then(|ty| ty.original.as_callable(self.db))
.map(|c| (Some(c.return_type()), None))
.unwrap_or((None, None))
},
ast::ParamList(_) => (None, None),
ast::Stmt(_) => (None, None),
ast::Item(_) => (None, None),
_ => {
match node.parent() {
Some(n) => {
node = n;
continue;
},
None => (None, None),
}
},
}
};
}
}
/// Fill the completion context; this is where the semantic reasoning about the surrounding
/// context of the completion location happens.
fn fill(
&mut self,
original_file: &SyntaxNode,
file_with_fake_ident: SyntaxNode,
offset: TextSize,
derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
) {
let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap();
let syntax_element = NodeOrToken::Token(fake_ident_token);
if is_in_token_of_for_loop(syntax_element.clone()) {
// for pat $0
// there is nothing to complete here except the `in` keyword
// don't bother populating the context
// FIXME: the completion calculations should end up good enough
// such that this special case becomes unnecessary
return;
}
self.previous_token = previous_token(syntax_element.clone());
self.fake_attribute_under_caret = syntax_element.ancestors().find_map(ast::Attr::cast);
self.incomplete_let =
syntax_element.ancestors().take(6).find_map(ast::LetStmt::cast).map_or(false, |it| {
it.syntax().text_range().end() == syntax_element.text_range().end()
});
(self.expected_type, self.expected_name) = self.expected_type_and_name();
// Overwrite the path kind for derives
if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
self.existing_derives = self
.sema
.resolve_derive_macro(&origin_attr)
.into_iter()
.flatten()
.flatten()
.collect();
if let Some(ast::NameLike::NameRef(name_ref)) =
find_node_at_offset(&file_with_fake_ident, offset)
{
self.name_syntax =
find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
if let Some((mut nameref_ctx, _)) =
Self::classify_name_ref(&self.sema, &original_file, name_ref)
{
if let Some(path_ctx) = &mut nameref_ctx.path_ctx {
path_ctx.kind = PathKind::Derive;
}
self.nameref_ctx = Some(nameref_ctx);
}
}
return;
}
let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
Some(it) => it,
None => return,
};
self.completion_location =
determine_location(&self.sema, original_file, offset, &name_like);
self.prev_sibling = determine_prev_sibling(&name_like);
self.name_syntax =
find_node_at_offset(original_file, name_like.syntax().text_range().start());
self.impl_def = self
.sema
.token_ancestors_with_macros(self.token.clone())
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
.find_map(ast::Impl::cast);
self.function_def = self
.sema
.token_ancestors_with_macros(self.token.clone())
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
.find_map(ast::Fn::cast);
match name_like {
ast::NameLike::Lifetime(lifetime) => {
self.lifetime_ctx = Self::classify_lifetime(&self.sema, original_file, lifetime);
}
ast::NameLike::NameRef(name_ref) => {
if let Some((nameref_ctx, pat_ctx)) =
Self::classify_name_ref(&self.sema, original_file, name_ref)
{
self.nameref_ctx = Some(nameref_ctx);
self.pattern_ctx = pat_ctx;
}
}
ast::NameLike::Name(name) => {
if let Some((name_ctx, pat_ctx)) =
Self::classify_name(&self.sema, original_file, name)
{
self.pattern_ctx = pat_ctx;
self.name_ctx = Some(name_ctx);
}
}
}
}
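/// Classifies the lifetime under the cursor: a lifetime parameter (and whether it is its
/// declaration), a label definition or reference, or a plain lifetime.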
fn classify_lifetime(
_sema: &Semantics<RootDatabase>,
_original_file: &SyntaxNode,
lifetime: ast::Lifetime,
) -> Option<LifetimeContext> {
let parent = lifetime.syntax().parent()?;
if parent.kind() == ERROR {
return None;
}
Some(match_ast! {
match parent {
ast::LifetimeParam(param) => LifetimeContext::LifetimeParam {
is_decl: param.lifetime().as_ref() == Some(&lifetime),
param
},
ast::BreakExpr(_) => LifetimeContext::LabelRef,
ast::ContinueExpr(_) => LifetimeContext::LabelRef,
ast::Label(_) => LifetimeContext::LabelDef,
_ => LifetimeContext::Lifetime,
}
})
}
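/// Classifies what kind of item the `ast::Name` under the cursor names, additionally
/// computing a [`PatternContext`] when the name is part of an `IdentPat`.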
fn classify_name(
_sema: &Semantics<RootDatabase>,
original_file: &SyntaxNode,
name: ast::Name,
) -> Option<(NameContext, Option<PatternContext>)> {
let parent = name.syntax().parent()?;
let mut pat_ctx = None;
let name_ctx = match_ast! {
match parent {
ast::Const(_) => NameContext::Const,
ast::ConstParam(_) => NameContext::ConstParam,
ast::Enum(_) => NameContext::Enum,
ast::Fn(_) => NameContext::Function,
ast::IdentPat(bind_pat) => {
let is_name_in_field_pat = bind_pat
.syntax()
.parent()
.and_then(ast::RecordPatField::cast)
.map_or(false, |pat_field| pat_field.name_ref().is_none());
if !is_name_in_field_pat {
pat_ctx = Some(pattern_context_for(original_file, bind_pat.into()));
}
NameContext::IdentPat
},
ast::MacroDef(_) => NameContext::MacroDef,
ast::MacroRules(_) => NameContext::MacroRules,
ast::Module(module) => NameContext::Module(module),
ast::RecordField(_) => NameContext::RecordField,
ast::Rename(_) => NameContext::Rename,
ast::SelfParam(_) => NameContext::SelfParam,
ast::Static(_) => NameContext::Static,
ast::Struct(_) => NameContext::Struct,
ast::Trait(_) => NameContext::Trait,
ast::TypeAlias(_) => NameContext::TypeAlias,
ast::TypeParam(_) => NameContext::TypeParam,
ast::Union(_) => NameContext::Union,
ast::Variant(_) => NameContext::Variant,
_ => return None,
}
};
Some((name_ctx, pat_ctx))
}
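/// Classifies the `ast::NameRef` under the cursor: either a dot access (field or method)
/// or a path, in which case the full [`PathCompletionCtx`] (path kind, qualifier, call
/// parens, …) is computed.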
fn classify_name_ref(
sema: &Semantics<RootDatabase>,
original_file: &SyntaxNode,
name_ref: ast::NameRef,
) -> Option<(NameRefContext, Option<PatternContext>)> {
let parent = name_ref.syntax().parent()?;
let mut nameref_ctx = NameRefContext { dot_access: None, path_ctx: None };
fn find_in_original_file<N: AstNode>(
x: Option<N>,
original_file: &SyntaxNode,
) -> Option<N> {
fn find_node_with_range<N: AstNode>(
syntax: &SyntaxNode,
range: TextRange,
) -> Option<N> {
let range = syntax.text_range().intersect(range)?;
syntax.covering_element(range).ancestors().find_map(N::cast)
}
x.map(|e| e.syntax().text_range()).and_then(|r| find_node_with_range(original_file, r))
}
let segment = match_ast! {
match parent {
ast::PathSegment(segment) => segment,
ast::FieldExpr(field) => {
let receiver = find_in_original_file(field.expr(), original_file);
let receiver_is_ambiguous_float_literal = match &receiver {
Some(ast::Expr::Literal(l)) => matches! {
l.kind(),
ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.kind() == T![.])
},
_ => false,
};
nameref_ctx.dot_access = Some(DotAccess::Field { receiver, receiver_is_ambiguous_float_literal });
return Some((nameref_ctx, None));
},
ast::MethodCallExpr(method) => {
nameref_ctx.dot_access = Some(
DotAccess::Method {
receiver: find_in_original_file(method.receiver(), original_file),
has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some())
}
);
return Some((nameref_ctx, None));
},
_ => return None,
}
};
let path = segment.parent_path();
let mut path_ctx = PathCompletionCtx {
has_call_parens: false,
has_macro_bang: false,
is_absolute_path: false,
qualifier: None,
parent: path.parent_path(),
kind: PathKind::Item { kind: ItemListKind::SourceFile },
has_type_args: false,
};
let mut pat_ctx = None;
let is_in_block = |it: &SyntaxNode| {
it.parent()
.map(|node| {
ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())
})
.unwrap_or(false)
};
path_ctx.kind = path.syntax().ancestors().find_map(|it| {
// using Option<Option<PathKind>> as extra control flow
let kind = match_ast! {
match it {
ast::PathType(_) => Some(PathKind::Type),
ast::PathExpr(it) => {
path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
let in_block_expr = is_in_block(it.syntax());
let in_loop_body = is_in_loop_body(it.syntax());
Some(PathKind::Expr { in_block_expr, in_loop_body })
},
ast::TupleStructPat(it) => {
path_ctx.has_call_parens = true;
pat_ctx = Some(pattern_context_for(original_file, it.into()));
Some(PathKind::Pat)
},
ast::RecordPat(it) => {
path_ctx.has_call_parens = true;
pat_ctx = Some(pattern_context_for(original_file, it.into()));
Some(PathKind::Pat)
},
ast::PathPat(it) => {
pat_ctx = Some(pattern_context_for(original_file, it.into()));
Some(PathKind::Pat)
},
ast::MacroCall(it) => {
path_ctx.has_macro_bang = it.excl_token().is_some();
let parent = it.syntax().parent();
match parent.as_ref().map(|it| it.kind()) {
Some(SyntaxKind::MACRO_PAT) => Some(PathKind::Pat),
Some(SyntaxKind::MACRO_TYPE) => Some(PathKind::Type),
Some(SyntaxKind::ITEM_LIST) => Some(PathKind::Item { kind: ItemListKind::Module }),
Some(SyntaxKind::ASSOC_ITEM_LIST) => Some(PathKind::Item { kind: match parent.and_then(|it| it.parent()).map(|it| it.kind()) {
Some(SyntaxKind::TRAIT) => ItemListKind::Trait,
Some(SyntaxKind::IMPL) => ItemListKind::Impl,
_ => return Some(None),
} }),
Some(SyntaxKind::EXTERN_ITEM_LIST) => Some(PathKind::Item { kind: ItemListKind::ExternBlock }),
Some(SyntaxKind::SOURCE_FILE) => Some(PathKind::Item { kind: ItemListKind::SourceFile }),
_ => {
return Some(parent.and_then(ast::MacroExpr::cast).map(|it| {
let in_loop_body = is_in_loop_body(it.syntax());
let in_block_expr = is_in_block(it.syntax());
PathKind::Expr { in_block_expr, in_loop_body }
}));
},
}
},
ast::Meta(meta) => (|| {
let attr = meta.parent_attr()?;
let kind = attr.kind();
let attached = attr.syntax().parent()?;
let is_trailing_outer_attr = kind != AttrKind::Inner
&& non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
let annotated_item_kind = if is_trailing_outer_attr {
None
} else {
Some(attached.kind())
};
Some(PathKind::Attr {
kind,
annotated_item_kind,
})
})(),
ast::Visibility(it) => Some(PathKind::Vis { has_in_token: it.in_token().is_some() }),
ast::UseTree(_) => Some(PathKind::Use),
ast::ItemList(_) => Some(PathKind::Item { kind: ItemListKind::Module }),
ast::AssocItemList(it) => Some(PathKind::Item { kind: {
match it.syntax().parent()?.kind() {
SyntaxKind::TRAIT => ItemListKind::Trait,
SyntaxKind::IMPL => ItemListKind::Impl,
_ => return None,
}
}}),
ast::ExternItemList(_) => Some(PathKind::Item { kind: ItemListKind::ExternBlock }),
ast::SourceFile(_) => Some(PathKind::Item { kind: ItemListKind::SourceFile }),
_ => return None,
}
};
Some(kind)
}).flatten()?;
path_ctx.has_type_args = segment.generic_arg_list().is_some();
if let Some((path, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
if !use_tree_parent {
path_ctx.is_absolute_path =
path.top_path().segment().map_or(false, |it| it.coloncolon_token().is_some());
}
let path = path
.segment()
.and_then(|it| find_node_in_file(original_file, &it))
.map(|it| it.parent_path());
path_ctx.qualifier = path.map(|path| {
let res = sema.resolve_path(&path);
let is_super_chain = iter::successors(Some(path.clone()), |p| p.qualifier())
.all(|p| p.segment().and_then(|s| s.super_token()).is_some());
// `<_>::$0`
let is_infer_qualifier = path.qualifier().is_none()
&& matches!(
path.segment().and_then(|it| it.kind()),
Some(ast::PathSegmentKind::Type {
type_ref: Some(ast::Type::InferType(_)),
trait_ref: None,
})
);
PathQualifierCtx {
path,
resolution: res,
is_super_chain,
use_tree_parent,
is_infer_qualifier,
}
});
} else if let Some(segment) = path.segment() {
if segment.coloncolon_token().is_some() {
path_ctx.is_absolute_path = true;
}
}
nameref_ctx.path_ctx = Some(path_ctx);
Some((nameref_ctx, pat_ctx))
}
}
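/// Computes the [`PatternContext`] for a pattern, looking at the pattern's first non-pattern
/// ancestor to decide refutability and mapping any surrounding param list back into `original_file`.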
fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternContext {
let mut is_param = None;
let (refutability, has_type_ascription) =
pat
.syntax()
.ancestors()
.skip_while(|it| ast::Pat::can_cast(it.kind()))
.next()
.map_or((PatternRefutability::Irrefutable, false), |node| {
let refutability = match_ast! {
match node {
ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()),
ast::Param(param) => {
let has_type_ascription = param.ty().is_some();
is_param = (|| {
let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
let param_list = find_node_in_file_compensated(original_file, &fake_param_list)?;
let param_list_owner = param_list.syntax().parent()?;
let kind = match_ast! {
match param_list_owner {
ast::ClosureExpr(closure) => ParamKind::Closure(closure),
ast::Fn(fn_) => ParamKind::Function(fn_),
_ => return None,
}
};
Some((param_list, param, kind))
})();
return (PatternRefutability::Irrefutable, has_type_ascription)
},
ast::MatchArm(_) => PatternRefutability::Refutable,
ast::LetExpr(_) => PatternRefutability::Refutable,
ast::ForExpr(_) => PatternRefutability::Irrefutable,
_ => PatternRefutability::Irrefutable,
}
};
(refutability, false)
});
let (ref_token, mut_token) = match &pat {
ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
_ => (None, None),
};
PatternContext {
refutability,
param_ctx: is_param,
has_type_ascription,
parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
mut_token,
ref_token,
}
}
/// Attempts to find `node` inside `syntax` via `node`'s text range.
fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
let syntax_range = syntax.text_range();
let range = node.syntax().text_range();
let intersection = range.intersect(syntax_range)?;
syntax.covering_element(intersection).ancestors().find_map(N::cast)
}
/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
/// for the offset introduced by the fake ident.
/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
fn find_node_in_file_compensated<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
let syntax_range = syntax.text_range();
let range = node.syntax().text_range();
let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
if end < range.start() {
return None;
}
let range = TextRange::new(range.start(), end);
// our inserted ident could cause `range` to go outside of the original syntax, so cap it
let intersection = range.intersect(syntax_range)?;
syntax.covering_element(intersection).ancestors().find_map(N::cast)
}
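/// Returns the qualifier of `path`, or, for a path inside a use tree list, the path of the
/// parent use tree; the returned flag is `true` in the use-tree case.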
fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
if let Some(qual) = path.qualifier() {
return Some((qual, false));
}
let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
Some((use_tree.path()?, true))
}
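/// Whether the token is preceded by `&` (or `&mut`), skipping over an identifier,
/// whitespace and `mut` in between.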
fn has_ref(token: &SyntaxToken) -> bool {
let mut token = token.clone();
for skip in [IDENT, WHITESPACE, T![mut]] {
if token.kind() == skip {
token = match token.prev_token() {
Some(it) => it,
None => return false,
}
}
}
token.kind() == T![&]
}
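/// Lang item names of the operator traits, used by [`CompletionContext::is_ops_trait`].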
const OP_TRAIT_LANG_NAMES: &[&str] = &[
"add_assign",
"add",
"bitand_assign",
"bitand",
"bitor_assign",
"bitor",
"bitxor_assign",
"bitxor",
"deref_mut",
"deref",
"div_assign",
"div",
"eq",
"fn_mut",
"fn_once",
"fn",
"index_mut",
"index",
"mul_assign",
"mul",
"neg",
"not",
"partial_ord",
"rem_assign",
"rem",
"shl_assign",
"shl",
"shr_assign",
"shr",
"sub",
];
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use hir::HirDisplay;
use crate::tests::{position, TEST_CONFIG};
use super::CompletionContext;
fn check_expected_type_and_name(ra_fixture: &str, expect: Expect) {
let (db, pos) = position(ra_fixture);
let config = TEST_CONFIG;
let completion_context = CompletionContext::new(&db, pos, &config).unwrap();
let ty = completion_context
.expected_type
.map(|t| t.display_test(&db).to_string())
.unwrap_or("?".to_owned());
let name = completion_context
.expected_name
.map_or_else(|| "?".to_owned(), |name| name.to_string());
expect.assert_eq(&format!("ty: {}, name: {}", ty, name));
}
#[test]
fn expected_type_let_without_leading_char() {
cov_mark::check!(expected_type_let_without_leading_char);
check_expected_type_and_name(
r#"
fn foo() {
let x: u32 = $0;
}
"#,
expect![[r#"ty: u32, name: x"#]],
);
}
#[test]
fn expected_type_let_with_leading_char() {
cov_mark::check!(expected_type_let_with_leading_char);
check_expected_type_and_name(
r#"
fn foo() {
let x: u32 = c$0;
}
"#,
expect![[r#"ty: u32, name: x"#]],
);
}
#[test]
fn expected_type_let_pat() {
check_expected_type_and_name(
r#"
fn foo() {
let x$0 = 0u32;
}
"#,
expect![[r#"ty: u32, name: ?"#]],
);
check_expected_type_and_name(
r#"
fn foo() {
let $0 = 0u32;
}
"#,
expect![[r#"ty: u32, name: ?"#]],
);
}
#[test]
fn expected_type_fn_param() {
cov_mark::check!(expected_type_fn_param);
check_expected_type_and_name(
r#"
fn foo() { bar($0); }
fn bar(x: u32) {}
"#,
expect![[r#"ty: u32, name: x"#]],
);
check_expected_type_and_name(
r#"
fn foo() { bar(c$0); }
fn bar(x: u32) {}
"#,
expect![[r#"ty: u32, name: x"#]],
);
}
#[test]
fn expected_type_fn_param_ref() {
cov_mark::check!(expected_type_fn_param_ref);
check_expected_type_and_name(
r#"
fn foo() { bar(&$0); }
fn bar(x: &u32) {}
"#,
expect![[r#"ty: u32, name: x"#]],
);
check_expected_type_and_name(
r#"
fn foo() { bar(&mut $0); }
fn bar(x: &mut u32) {}
"#,
expect![[r#"ty: u32, name: x"#]],
);
check_expected_type_and_name(
r#"
fn foo() { bar(& c$0); }
fn bar(x: &u32) {}
"#,
expect![[r#"ty: u32, name: x"#]],
);
check_expected_type_and_name(
r#"
fn foo() { bar(&mut c$0); }
fn bar(x: &mut u32) {}
"#,
expect![[r#"ty: u32, name: x"#]],
);
check_expected_type_and_name(
r#"
fn foo() { bar(&c$0); }
fn bar(x: &u32) {}
"#,
expect![[r#"ty: u32, name: x"#]],
);
}
#[test]
fn expected_type_struct_field_without_leading_char() {
cov_mark::check!(expected_type_struct_field_without_leading_char);
check_expected_type_and_name(
r#"
struct Foo { a: u32 }
fn foo() {
Foo { a: $0 };
}
"#,
expect![[r#"ty: u32, name: a"#]],
)
}
#[test]
fn expected_type_struct_field_followed_by_comma() {
cov_mark::check!(expected_type_struct_field_followed_by_comma);
check_expected_type_and_name(
r#"
struct Foo { a: u32 }
fn foo() {
Foo { a: $0, };
}
"#,
expect![[r#"ty: u32, name: a"#]],
)
}
#[test]
fn expected_type_generic_struct_field() {
check_expected_type_and_name(
r#"
struct Foo<T> { a: T }
fn foo() -> Foo<u32> {
Foo { a: $0 }
}
"#,
expect![[r#"ty: u32, name: a"#]],
)
}
#[test]
fn expected_type_struct_field_with_leading_char() {
cov_mark::check!(expected_type_struct_field_with_leading_char);
check_expected_type_and_name(
r#"
struct Foo { a: u32 }
fn foo() {
Foo { a: c$0 };
}
"#,
expect![[r#"ty: u32, name: a"#]],
);
}
#[test]
fn expected_type_match_arm_without_leading_char() {
cov_mark::check!(expected_type_match_arm_without_leading_char);
check_expected_type_and_name(
r#"
enum E { X }
fn foo() {
match E::X { $0 }
}
"#,
expect![[r#"ty: E, name: ?"#]],
);
}
#[test]
fn expected_type_match_arm_with_leading_char() {
cov_mark::check!(expected_type_match_arm_with_leading_char);
check_expected_type_and_name(
r#"
enum E { X }
fn foo() {
match E::X { c$0 }
}
"#,
expect![[r#"ty: E, name: ?"#]],
);
}
#[test]
fn expected_type_if_let_without_leading_char() {
cov_mark::check!(expected_type_if_let_without_leading_char);
check_expected_type_and_name(
r#"
enum Foo { Bar, Baz, Quux }
fn foo() {
let f = Foo::Quux;
if let $0 = f { }
}
"#,
expect![[r#"ty: Foo, name: ?"#]],
)
}
#[test]
fn expected_type_if_let_with_leading_char() {
cov_mark::check!(expected_type_if_let_with_leading_char);
check_expected_type_and_name(
r#"
enum Foo { Bar, Baz, Quux }
fn foo() {
let f = Foo::Quux;
if let c$0 = f { }
}
"#,
expect![[r#"ty: Foo, name: ?"#]],
)
}
#[test]
fn expected_type_fn_ret_without_leading_char() {
cov_mark::check!(expected_type_fn_ret_without_leading_char);
check_expected_type_and_name(
r#"
fn foo() -> u32 {
$0
}
"#,
expect![[r#"ty: u32, name: ?"#]],
)
}
#[test]
fn expected_type_fn_ret_with_leading_char() {
cov_mark::check!(expected_type_fn_ret_with_leading_char);
check_expected_type_and_name(
r#"
fn foo() -> u32 {
c$0
}
"#,
expect![[r#"ty: u32, name: ?"#]],
)
}
#[test]
fn expected_type_fn_ret_fn_ref_fully_typed() {
check_expected_type_and_name(
r#"
fn foo() -> u32 {
foo$0
}
"#,
expect![[r#"ty: u32, name: ?"#]],
)
}
#[test]
fn expected_type_closure_param_return() {
// FIXME: make this work with `|| $0`
check_expected_type_and_name(
r#"
//- minicore: fn
fn foo() {
bar(|| a$0);
}
fn bar(f: impl FnOnce() -> u32) {}
"#,
expect![[r#"ty: u32, name: ?"#]],
);
}
#[test]
fn expected_type_generic_function() {
check_expected_type_and_name(
r#"
fn foo() {
bar::<u32>($0);
}
fn bar<T>(t: T) {}
"#,
expect![[r#"ty: u32, name: t"#]],
);
}
#[test]
fn expected_type_generic_method() {
check_expected_type_and_name(
r#"
fn foo() {
S(1u32).bar($0);
}
struct S<T>(T);
impl<T> S<T> {
fn bar(self, t: T) {}
}
"#,
expect![[r#"ty: u32, name: t"#]],
);
}
#[test]
fn expected_type_functional_update() {
cov_mark::check!(expected_type_struct_func_update);
check_expected_type_and_name(
r#"
struct Foo { field: u32 }
fn foo() {
Foo {
..$0
}
}
"#,
expect![[r#"ty: Foo, name: ?"#]],
);
}
#[test]
fn expected_type_param_pat() {
check_expected_type_and_name(
r#"
struct Foo { field: u32 }
fn foo(a$0: Foo) {}
"#,
expect![[r#"ty: Foo, name: ?"#]],
);
check_expected_type_and_name(
r#"
struct Foo { field: u32 }
fn foo($0: Foo) {}
"#,
// FIXME make this work, currently fails due to pattern recovery eating the `:`
expect![[r#"ty: ?, name: ?"#]],
);
}
}