Lukas Wirth 2021-11-10 17:33:35 +01:00
parent f724c84e7d
commit dea973089c
3 changed files with 33 additions and 26 deletions

crates/hir/src/semantics.rs

@@ -897,13 +897,13 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let sa = self.analyze(node);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze(node);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let sa = self.analyze_with_offset(node, offset);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze_with_offset(node, offset);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
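
Note: the hunk above swaps repeated field access on a bound `sa` for a destructuring `let` plus field-shorthand struct init. A minimal sketch of the idiom, with hypothetical stand-ins rather than the real SourceAnalyzer/SemanticsScope:

    // Hypothetical stand-ins for SourceAnalyzer and SemanticsScope.
    struct Analyzer {
        file_id: u32,
        resolver: String,
        cache: Vec<u8>, // extra fields are skipped by `..`
    }

    struct Scope {
        file_id: u32,
        resolver: String,
    }

    fn scope(analyzer: Analyzer) -> Scope {
        // Bind the needed fields once instead of writing `analyzer.file_id`,
        // `analyzer.resolver`; `..` ignores the rest, and the struct-init
        // shorthand reuses the bound names.
        let Analyzer { file_id, resolver, .. } = analyzer;
        Scope { file_id, resolver }
    }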
@@ -924,9 +924,11 @@ impl<'db> SemanticsImpl<'db> {
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
         self.analyze_impl(node, None)
     }
+
     fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
         self.analyze_impl(node, Some(offset))
     }
+
     fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
         let _p = profile::span("Semantics::analyze_impl");
         let node = self.find_file(node.clone());
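
Note: for context, `analyze` and `analyze_with_offset` are thin entry points that funnel into a single `analyze_impl` taking an `Option<TextSize>`. A sketch of that wrapper pattern, with simplified hypothetical types:

    struct Analysis {
        offset: Option<u32>, // stands in for Option<TextSize>
    }

    fn analyze() -> Analysis {
        analyze_impl(None)
    }

    fn analyze_with_offset(offset: u32) -> Analysis {
        analyze_impl(Some(offset))
    }

    fn analyze_impl(offset: Option<u32>) -> Analysis {
        // Shared work (profiling span, file lookup, ...) lives here exactly once.
        Analysis { offset }
    }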

crates/hir_ty/src/method_resolution.rs

@@ -13,6 +13,7 @@ use hir_def::{
 };
 use hir_expand::name::Name;
 use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::never;
 
 use crate::{
     autoderef,
@@ -322,7 +323,7 @@ pub fn def_crates(
         }};
     }
 
-    let mod_to_crate_ids = |module: ModuleId| Some(std::iter::once(module.krate()).collect());
+    let mod_to_crate_ids = |module: ModuleId| Some(iter::once(module.krate()).collect());
 
     let lang_item_targets = match ty.kind(&Interner) {
         TyKind::Adt(AdtId(def_id), _) => {
@@ -521,9 +522,16 @@ fn iterate_method_candidates_with_autoref(
     name: Option<&Name>,
     mut callback: &mut dyn FnMut(&Canonical<Ty>, AssocItemId) -> ControlFlow<()>,
 ) -> ControlFlow<()> {
+    let (receiver_ty, rest) = match deref_chain.split_first() {
+        Some((rec, rest)) => (rec.clone(), rest),
+        None => {
+            never!("received empty deref-chain");
+            return ControlFlow::Break(());
+        }
+    };
     iterate_method_candidates_by_receiver(
-        &deref_chain[0],
-        &deref_chain[1..],
+        &receiver_ty,
+        &rest,
         db,
         env.clone(),
         krate,
@@ -534,8 +542,8 @@ fn iterate_method_candidates_with_autoref(
     )?;
 
     let refed = Canonical {
-        binders: deref_chain[0].binders.clone(),
-        value: TyKind::Ref(Mutability::Not, static_lifetime(), deref_chain[0].value.clone())
+        binders: receiver_ty.binders.clone(),
+        value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
             .intern(&Interner),
     };
@@ -552,9 +560,8 @@ fn iterate_method_candidates_with_autoref(
     )?;
 
     let ref_muted = Canonical {
-        binders: deref_chain[0].binders.clone(),
-        value: TyKind::Ref(Mutability::Mut, static_lifetime(), deref_chain[0].value.clone())
-            .intern(&Interner),
+        binders: receiver_ty.binders,
+        value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value).intern(&Interner),
     };
 
     iterate_method_candidates_by_receiver(
@@ -584,7 +591,7 @@ fn iterate_method_candidates_by_receiver(
     // We're looking for methods with *receiver* type receiver_ty. These could
     // be found in any of the derefs of receiver_ty, so we have to go through
     // that.
-    for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
+    for self_ty in iter::once(receiver_ty).chain(rest_of_deref_chain) {
         iterate_inherent_methods(
             self_ty,
             db,
@@ -597,7 +604,7 @@ fn iterate_method_candidates_by_receiver(
         )?
     }
 
-    for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
+    for self_ty in iter::once(receiver_ty).chain(rest_of_deref_chain) {
         iterate_trait_method_candidates(
             self_ty,
             db,
@@ -659,8 +666,7 @@ fn iterate_trait_method_candidates(
         }
         _ => Vec::new(),
     };
-    let traits =
-        inherent_trait.chain(env_traits.into_iter()).chain(traits_in_scope.iter().copied());
+    let traits = inherent_trait.chain(env_traits).chain(traits_in_scope.iter().copied());
 
     'traits: for t in traits {
         let data = db.trait_data(t);
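
Note: dropping `.into_iter()` works because `Iterator::chain` accepts any `IntoIterator`, so a Vec can be passed directly. A quick standalone demonstration:

    fn main() {
        let inherent = std::iter::once(1);
        let env_traits = vec![2, 3];
        let in_scope = [4, 5];

        // `chain` takes `impl IntoIterator<Item = T>`; `env_traits.into_iter()`
        // would be equivalent but noisier.
        let all: Vec<i32> = inherent.chain(env_traits).chain(in_scope.iter().copied()).collect();
        assert_eq!(all, [1, 2, 3, 4, 5]);
    }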
@@ -750,7 +756,7 @@ fn iterate_inherent_methods(
     let impls_for_self_ty = filter_inherent_impls_for_self_ty(&impls, &self_ty.value);
     for &impl_def in impls_for_self_ty {
-        for &item in db.impl_data(impl_def).items.iter() {
+        for &item in &db.impl_data(impl_def).items {
             if !is_valid_candidate(
                 db,
                 env.clone(),
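
Note: the main change in this file replaces panicking indexing (`deref_chain[0]`, `&deref_chain[1..]`) with `split_first` plus stdx's `never!` soft assertion, so an empty deref chain bails out instead of crashing. A rough standalone equivalent using only std, with `debug_assert!` as a stand-in for `never!`:

    use std::ops::ControlFlow;

    // Hypothetical simplified receiver type standing in for Canonical<Ty>.
    type Receiver = String;

    fn iterate_candidates(deref_chain: &[Receiver]) -> ControlFlow<()> {
        // `split_first` returns None on an empty slice instead of panicking
        // the way `deref_chain[0]` would.
        let (receiver_ty, rest) = match deref_chain.split_first() {
            Some((rec, rest)) => (rec.clone(), rest),
            None => {
                // `never!` logs the "impossible" case and keeps going in release
                // builds; `debug_assert!` is only a rough std approximation.
                debug_assert!(false, "received empty deref-chain");
                return ControlFlow::Break(());
            }
        };
        println!("receiver: {}, {} more derefs", receiver_ty, rest.len());
        ControlFlow::Continue(())
    }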

crates/ide_completion/src/context.rs

@@ -350,29 +350,28 @@ impl<'a> CompletionContext<'a> {
 impl<'a> CompletionContext<'a> {
     pub(super) fn new(
         db: &'a RootDatabase,
-        position: FilePosition,
+        position @ FilePosition { file_id, offset }: FilePosition,
         config: &'a CompletionConfig,
     ) -> Option<CompletionContext<'a>> {
         let _p = profile::span("CompletionContext::new");
         let sema = Semantics::new(db);
 
-        let original_file = sema.parse(position.file_id);
+        let original_file = sema.parse(file_id);
 
         // Insert a fake ident to get a valid parse tree. We will use this file
         // to determine context, though the original_file will be used for
         // actual completion.
         let file_with_fake_ident = {
-            let parse = db.parse(position.file_id);
-            let edit = Indel::insert(position.offset, "intellijRulezz".to_string());
+            let parse = db.parse(file_id);
+            let edit = Indel::insert(offset, "intellijRulezz".to_string());
             parse.reparse(&edit).tree()
         };
         let fake_ident_token =
-            file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap();
+            file_with_fake_ident.syntax().token_at_offset(offset).right_biased().unwrap();
 
-        let original_token =
-            original_file.syntax().token_at_offset(position.offset).left_biased()?;
+        let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
         let token = sema.descend_into_macros_single(original_token.clone());
-        let scope = sema.scope_at_offset(&token, position.offset);
+        let scope = sema.scope_at_offset(&token, offset);
         let krate = scope.krate();
         let mut locals = vec![];
         scope.process_all_names(&mut |name, scope| {
@@ -408,7 +407,7 @@ impl<'a> CompletionContext<'a> {
         ctx.expand_and_fill(
             original_file.syntax().clone(),
             file_with_fake_ident.syntax().clone(),
-            position.offset,
+            offset,
             fake_ident_token,
         );
         Some(ctx)
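
Note: the new signature uses an `@` binding to keep the whole FilePosition while also destructuring its fields, so the body writes `offset` instead of `position.offset` yet can still pass `position` along. A standalone sketch with a hypothetical Copy mirror of the type (binding both the whole value and its fields requires Copy or ref bindings):

    // Hypothetical mirror of the real FilePosition, which is Copy.
    #[derive(Clone, Copy, Debug)]
    struct FilePosition {
        file_id: u32,
        offset: u32,
    }

    // `position @ FilePosition { file_id, offset }` binds the struct and its
    // fields in one pattern.
    fn complete(position @ FilePosition { file_id, offset }: FilePosition) {
        println!("file {}, offset {}, full {:?}", file_id, offset, position);
    }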