diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index b09c0e753b..c753369bfd 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -897,13 +897,13 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let sa = self.analyze(node);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze(node);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let sa = self.analyze_with_offset(node, offset);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze_with_offset(node, offset);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -924,9 +924,11 @@ impl<'db> SemanticsImpl<'db> {
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
         self.analyze_impl(node, None)
     }
+
     fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
         self.analyze_impl(node, Some(offset))
     }
+
     fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
         let _p = profile::span("Semantics::analyze_impl");
         let node = self.find_file(node.clone());
diff --git a/crates/hir_ty/src/method_resolution.rs b/crates/hir_ty/src/method_resolution.rs
index 8e6ab8af0f..eabe3acdcd 100644
--- a/crates/hir_ty/src/method_resolution.rs
+++ b/crates/hir_ty/src/method_resolution.rs
@@ -13,6 +13,7 @@ use hir_def::{
 };
 use hir_expand::name::Name;
 use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::never;
 
 use crate::{
     autoderef,
@@ -322,7 +323,7 @@ pub fn def_crates(
         }};
     }
 
-    let mod_to_crate_ids = |module: ModuleId| Some(std::iter::once(module.krate()).collect());
+    let mod_to_crate_ids = |module: ModuleId| Some(iter::once(module.krate()).collect());
 
     let lang_item_targets = match ty.kind(&Interner) {
         TyKind::Adt(AdtId(def_id), _) => {
@@ -521,9 +522,16 @@ fn iterate_method_candidates_with_autoref(
     name: Option<&Name>,
     mut callback: &mut dyn FnMut(&Canonical<Ty>, AssocItemId) -> ControlFlow<()>,
 ) -> ControlFlow<()> {
+    let (receiver_ty, rest) = match deref_chain.split_first() {
+        Some((rec, rest)) => (rec.clone(), rest),
+        None => {
+            never!("received empty deref-chain");
+            return ControlFlow::Break(());
+        }
+    };
     iterate_method_candidates_by_receiver(
-        &deref_chain[0],
-        &deref_chain[1..],
+        &receiver_ty,
+        &rest,
         db,
         env.clone(),
         krate,
@@ -534,8 +542,8 @@ fn iterate_method_candidates_with_autoref(
     )?;
 
     let refed = Canonical {
-        binders: deref_chain[0].binders.clone(),
-        value: TyKind::Ref(Mutability::Not, static_lifetime(), deref_chain[0].value.clone())
+        binders: receiver_ty.binders.clone(),
+        value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
             .intern(&Interner),
     };
 
@@ -552,9 +560,8 @@ fn iterate_method_candidates_with_autoref(
     )?;
 
     let ref_muted = Canonical {
-        binders: deref_chain[0].binders.clone(),
-        value: TyKind::Ref(Mutability::Mut, static_lifetime(), deref_chain[0].value.clone())
-            .intern(&Interner),
+        binders: receiver_ty.binders,
+        value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value).intern(&Interner),
     };
 
     iterate_method_candidates_by_receiver(
@@ -584,7 +591,7 @@ fn iterate_method_candidates_by_receiver(
     // We're looking for methods with *receiver* type receiver_ty. These could
    // be found in any of the derefs of receiver_ty, so we have to go through
     // that.
-    for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
+    for self_ty in iter::once(receiver_ty).chain(rest_of_deref_chain) {
         iterate_inherent_methods(
             self_ty,
             db,
@@ -597,7 +604,7 @@ fn iterate_method_candidates_by_receiver(
         )?
     }
 
-    for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
+    for self_ty in iter::once(receiver_ty).chain(rest_of_deref_chain) {
         iterate_trait_method_candidates(
             self_ty,
             db,
@@ -659,8 +666,7 @@ fn iterate_trait_method_candidates(
         }
         _ => Vec::new(),
     };
-    let traits =
-        inherent_trait.chain(env_traits.into_iter()).chain(traits_in_scope.iter().copied());
+    let traits = inherent_trait.chain(env_traits).chain(traits_in_scope.iter().copied());
 
     'traits: for t in traits {
         let data = db.trait_data(t);
@@ -750,7 +756,7 @@ fn iterate_inherent_methods(
 
     let impls_for_self_ty = filter_inherent_impls_for_self_ty(&impls, &self_ty.value);
     for &impl_def in impls_for_self_ty {
-        for &item in db.impl_data(impl_def).items.iter() {
+        for &item in &db.impl_data(impl_def).items {
             if !is_valid_candidate(
                 db,
                 env.clone(),
diff --git a/crates/ide_completion/src/context.rs b/crates/ide_completion/src/context.rs
index 3f40260d33..05ae95769b 100644
--- a/crates/ide_completion/src/context.rs
+++ b/crates/ide_completion/src/context.rs
@@ -350,29 +350,28 @@ impl<'a> CompletionContext<'a> {
 impl<'a> CompletionContext<'a> {
     pub(super) fn new(
         db: &'a RootDatabase,
-        position: FilePosition,
+        position @ FilePosition { file_id, offset }: FilePosition,
         config: &'a CompletionConfig,
     ) -> Option<CompletionContext<'a>> {
         let _p = profile::span("CompletionContext::new");
         let sema = Semantics::new(db);
 
-        let original_file = sema.parse(position.file_id);
+        let original_file = sema.parse(file_id);
 
         // Insert a fake ident to get a valid parse tree. We will use this file
         // to determine context, though the original_file will be used for
         // actual completion.
         let file_with_fake_ident = {
-            let parse = db.parse(position.file_id);
-            let edit = Indel::insert(position.offset, "intellijRulezz".to_string());
+            let parse = db.parse(file_id);
+            let edit = Indel::insert(offset, "intellijRulezz".to_string());
             parse.reparse(&edit).tree()
         };
         let fake_ident_token =
-            file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap();
+            file_with_fake_ident.syntax().token_at_offset(offset).right_biased().unwrap();
 
-        let original_token =
-            original_file.syntax().token_at_offset(position.offset).left_biased()?;
+        let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
         let token = sema.descend_into_macros_single(original_token.clone());
-        let scope = sema.scope_at_offset(&token, position.offset);
+        let scope = sema.scope_at_offset(&token, offset);
         let krate = scope.krate();
         let mut locals = vec![];
         scope.process_all_names(&mut |name, scope| {
@@ -408,7 +407,7 @@ impl<'a> CompletionContext<'a> {
         ctx.expand_and_fill(
             original_file.syntax().clone(),
             file_with_fake_ident.syntax().clone(),
-            position.offset,
+            offset,
             fake_ident_token,
         );
         Some(ctx)
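
Note: two Rust patterns carry most of the refactoring above. The sketch below is a minimal, self-contained illustration of both, using hypothetical stand-in types rather than rust-analyzer's own.

    // Stand-in type for illustration only (not rust-analyzer's FilePosition).
    #[derive(Clone, Copy)]
    struct FilePosition {
        file_id: u32,
        offset: u32,
    }

    // 1. An `@` binding in parameter position keeps the whole value bound
    //    while also destructuring its fields, as
    //    `position @ FilePosition { file_id, offset }` does in
    //    `CompletionContext::new`.
    fn describe(position @ FilePosition { file_id, offset }: FilePosition) -> String {
        let _whole: FilePosition = position; // the complete value stays available
        format!("file {file_id}, offset {offset}")
    }

    // 2. `slice::split_first` replaces panicky `chain[0]` / `chain[1..]`
    //    indexing with an explicit empty-slice branch, as in
    //    `iterate_method_candidates_with_autoref` (which logs via `never!`
    //    rather than returning None).
    fn head_and_tail(chain: &[u32]) -> Option<(u32, &[u32])> {
        let (head, rest) = chain.split_first()?;
        Some((*head, rest))
    }

    fn main() {
        println!("{}", describe(FilePosition { file_id: 1, offset: 42 }));
        assert_eq!(head_and_tail(&[1, 2, 3]), Some((1, &[2, 3][..])));
        assert_eq!(head_and_tail(&[]), None);
    }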