internal: Enforce Resolver to always have a module scope
parent ef92453dfe
commit 75689f2ad8
71 changed files with 360 additions and 398 deletions
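The diff is easier to follow with the new invariant in mind: a `Resolver` is now always built with at least one module scope at the bottom of its scope stack, so `Resolver::module()`/`Resolver::krate()` (and `SemanticsScope::module()`/`krate()`) return plain values instead of `Option`s, while fallibility moves to `Semantics::scope`/`analyze`, which return `None` when a node does not belong to any crate. Below is a minimal, self-contained sketch of that invariant only; the types (`DefMap`, `Scope`, the `u32` ids) are simplified stand-ins, not the actual rust-analyzer definitions.

```rust
// Sketch of the invariant this commit enforces; the real rust-analyzer
// types carry far more state. All names here are illustrative stand-ins.
struct DefMap {
    krate: u32, // CrateId in the real code
}

enum Scope {
    ModuleScope { def_map: DefMap, module_id: u32 },
    #[allow(dead_code)]
    ExprScope, // placeholder for the other scope kinds
}

struct Resolver {
    // Invariant: scopes[0] is always a Scope::ModuleScope.
    scopes: Vec<Scope>,
}

impl Resolver {
    // Constructors establish the invariant up front instead of relying on Default.
    fn for_module(def_map: DefMap, module_id: u32) -> Resolver {
        Resolver { scopes: vec![Scope::ModuleScope { def_map, module_id }] }
    }

    // Because of the invariant, these accessors no longer need to return Option.
    fn module_scope(&self) -> (&DefMap, u32) {
        self.scopes
            .iter()
            .rev()
            .find_map(|scope| match scope {
                Scope::ModuleScope { def_map, module_id } => Some((def_map, *module_id)),
                _ => None,
            })
            .expect("module scope invariant violated")
    }

    fn krate(&self) -> u32 {
        self.module_scope().0.krate
    }
}

fn main() {
    let resolver = Resolver::for_module(DefMap { krate: 0 }, 0);
    assert_eq!(resolver.krate(), 0);
}
```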
@@ -159,7 +159,7 @@ impl Crate {
             .map(|dep| {
                 let krate = Crate { id: dep.crate_id };
                 let name = dep.as_name();
-                CrateDependency { krate, name, }
+                CrateDependency { krate, name }
             })
             .collect()
     }
@@ -2224,7 +2224,7 @@ impl BuiltinAttr {
         Some(BuiltinAttr { krate: Some(krate.id), idx })
     }

-    pub(crate) fn builtin(name: &str) -> Option<Self> {
+    fn builtin(name: &str) -> Option<Self> {
         hir_def::builtin_attr::INERT_ATTRIBUTES
             .iter()
             .position(|tool| tool.name == name)
@@ -2263,7 +2263,7 @@ impl ToolModule {
         Some(ToolModule { krate: Some(krate.id), idx })
     }

-    pub(crate) fn builtin(name: &str) -> Option<Self> {
+    fn builtin(name: &str) -> Option<Self> {
         hir_def::builtin_attr::TOOL_MODULES
             .iter()
             .position(|&tool| tool == name)
@@ -2613,13 +2613,9 @@ pub struct Type {
 }

 impl Type {
-    pub(crate) fn new_with_resolver(
-        db: &dyn HirDatabase,
-        resolver: &Resolver,
-        ty: Ty,
-    ) -> Option<Type> {
-        let krate = resolver.krate()?;
-        Some(Type::new_with_resolver_inner(db, krate, resolver, ty))
+    pub(crate) fn new_with_resolver(db: &dyn HirDatabase, resolver: &Resolver, ty: Ty) -> Type {
+        let krate = resolver.krate();
+        Type::new_with_resolver_inner(db, krate, resolver, ty)
     }

     pub(crate) fn new_with_resolver_inner(
@@ -3038,10 +3034,7 @@ impl Type {
         // There should be no inference vars in types passed here
         let canonical = hir_ty::replace_errors_with_variables(&self.ty);

-        let krate = match scope.krate() {
-            Some(k) => k,
-            None => return,
-        };
+        let krate = scope.krate();
         let environment = scope.resolver().generic_def().map_or_else(
             || Arc::new(TraitEnvironment::empty(krate.id)),
             |d| db.trait_environment(d),
@@ -3098,10 +3091,7 @@ impl Type {
     ) {
         let canonical = hir_ty::replace_errors_with_variables(&self.ty);

-        let krate = match scope.krate() {
-            Some(k) => k,
-            None => return,
-        };
+        let krate = scope.krate();
         let environment = scope.resolver().generic_def().map_or_else(
             || Arc::new(TraitEnvironment::empty(krate.id)),
             |d| db.trait_environment(d),
@@ -403,11 +403,15 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.to_module_def(file)
     }

-    pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
+    pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
         self.imp.scope(node)
     }

-    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
+    pub fn scope_at_offset(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<SemanticsScope<'db>> {
         self.imp.scope_at_offset(node, offset)
     }

@@ -456,7 +460,7 @@ impl<'db> SemanticsImpl<'db> {
     }

     fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
-        let sa = self.analyze_no_infer(macro_call.syntax());
+        let sa = self.analyze_no_infer(macro_call.syntax())?;
         let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
         let node = self.parse_or_expand(file_id)?;
         Some(node)
@@ -535,9 +539,9 @@ impl<'db> SemanticsImpl<'db> {
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
         let SourceAnalyzer { file_id, resolver, .. } =
-            self.analyze_no_infer(actual_macro_call.syntax());
+            self.analyze_no_infer(actual_macro_call.syntax())?;
         let macro_call = InFile::new(file_id, actual_macro_call);
-        let krate = resolver.krate()?;
+        let krate = resolver.krate();
         let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
             resolver
                 .resolve_path_as_macro(self.db.upcast(), &path)
@@ -669,7 +673,10 @@ impl<'db> SemanticsImpl<'db> {
             Some(it) => it,
             None => return,
         };
-        let sa = self.analyze_no_infer(&parent);
+        let sa = match self.analyze_no_infer(&parent) {
+            Some(it) => it,
+            None => return,
+        };
         let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
         let mut cache = self.expansion_info_cache.borrow_mut();
         let mut mcache = self.macro_call_cache.borrow_mut();
@@ -903,70 +910,74 @@ impl<'db> SemanticsImpl<'db> {
     }

     fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
-        let scope = self.scope(ty.syntax());
-        let ctx = body::LowerCtx::new(self.db.upcast(), scope.file_id);
-        let ty = hir_ty::TyLoweringContext::new(self.db, &scope.resolver)
+        let analyze = self.analyze(ty.syntax())?;
+        let ctx = body::LowerCtx::new(self.db.upcast(), analyze.file_id);
+        let ty = hir_ty::TyLoweringContext::new(self.db, &analyze.resolver)
             .lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
-        Type::new_with_resolver(self.db, &scope.resolver, ty)
+        Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
     }

     fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
-        self.analyze(expr.syntax()).is_implicit_reborrow(self.db, expr)
+        self.analyze(expr.syntax())?.is_implicit_reborrow(self.db, expr)
     }

     fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
-        self.analyze(expr.syntax())
+        self.analyze(expr.syntax())?
             .type_of_expr(self.db, expr)
             .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
     }

     fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
-        self.analyze(pat.syntax())
+        self.analyze(pat.syntax())?
             .type_of_pat(self.db, pat)
             .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
     }

     fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
-        self.analyze(param.syntax()).type_of_self(self.db, param)
+        self.analyze(param.syntax())?.type_of_self(self.db, param)
     }

     fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
-        self.analyze(call.syntax()).resolve_method_call(self.db, call).map(|(id, _)| id)
+        self.analyze(call.syntax())?.resolve_method_call(self.db, call).map(|(id, _)| id)
     }

     fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
-        let (func, subst) = self.analyze(call.syntax()).resolve_method_call(self.db, call)?;
+        let source_analyzer = self.analyze(call.syntax())?;
+        let (func, subst) = source_analyzer.resolve_method_call(self.db, call)?;
         let ty = self.db.value_ty(func.into()).substitute(Interner, &subst);
-        let resolver = self.analyze(call.syntax()).resolver;
-        let ty = Type::new_with_resolver(self.db, &resolver, ty)?;
+        let resolver = source_analyzer.resolver;
+        let ty = Type::new_with_resolver(self.db, &resolver, ty);
         let mut res = ty.as_callable(self.db)?;
         res.is_bound_method = true;
         Some(res)
     }

     fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
-        self.analyze(field.syntax()).resolve_field(self.db, field)
+        self.analyze(field.syntax())?.resolve_field(self.db, field)
     }

     fn resolve_record_field(
         &self,
         field: &ast::RecordExprField,
     ) -> Option<(Field, Option<Local>, Type)> {
-        self.analyze(field.syntax()).resolve_record_field(self.db, field)
+        self.analyze(field.syntax())?.resolve_record_field(self.db, field)
     }

     fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
-        self.analyze(field.syntax()).resolve_record_pat_field(self.db, field)
+        self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
     }

     fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
-        let sa = self.analyze(macro_call.syntax());
+        let sa = self.analyze(macro_call.syntax())?;
         let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
         sa.resolve_macro_call(self.db, macro_call)
     }

     fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
-        let sa = self.analyze(macro_call.syntax());
+        let sa = match self.analyze(macro_call.syntax()) {
+            Some(it) => it,
+            None => return false,
+        };
         let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
         sa.is_unsafe_macro_call(self.db, macro_call)
     }
@@ -981,11 +992,11 @@ impl<'db> SemanticsImpl<'db> {
     }

     fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
-        self.analyze(path.syntax()).resolve_path(self.db, path)
+        self.analyze(path.syntax())?.resolve_path(self.db, path)
     }

     fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
-        let krate = self.scope(extern_crate.syntax()).krate()?;
+        let krate = self.scope(extern_crate.syntax())?.krate();
         let name = extern_crate.name_ref()?.as_name();
         if name == known::SELF_PARAM {
             return Some(krate);
@@ -997,22 +1008,22 @@ impl<'db> SemanticsImpl<'db> {
     }

     fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
-        self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit)
+        self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
     }

     fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
-        self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
+        self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
     }

     fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
         self.analyze(literal.syntax())
-            .record_literal_missing_fields(self.db, literal)
+            .and_then(|it| it.record_literal_missing_fields(self.db, literal))
             .unwrap_or_default()
     }

     fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
         self.analyze(pattern.syntax())
-            .record_pattern_missing_fields(self.db, pattern)
+            .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
             .unwrap_or_default()
     }

@@ -1026,15 +1037,22 @@ impl<'db> SemanticsImpl<'db> {
         self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
     }

-    fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let SourceAnalyzer { file_id, resolver, .. } = self.analyze_no_infer(node);
-        SemanticsScope { db: self.db, file_id, resolver }
+    fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+        self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+            db: self.db,
+            file_id,
+            resolver,
+        })
     }

-    fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let SourceAnalyzer { file_id, resolver, .. } =
-            self.analyze_with_offset_no_infer(node, offset);
-        SemanticsScope { db: self.db, file_id, resolver }
+    fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> Option<SemanticsScope<'db>> {
+        self.analyze_with_offset_no_infer(node, offset).map(
+            |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+                db: self.db,
+                file_id,
+                resolver,
+            },
+        )
     }

     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -1052,15 +1070,21 @@ impl<'db> SemanticsImpl<'db> {
         Some(res)
     }

-    fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
+    /// Returns none if the file of the node is not part of a crate.
+    fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
         self.analyze_impl(node, None, true)
     }

-    fn analyze_no_infer(&self, node: &SyntaxNode) -> SourceAnalyzer {
+    /// Returns none if the file of the node is not part of a crate.
+    fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
         self.analyze_impl(node, None, false)
     }

-    fn analyze_with_offset_no_infer(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
+    fn analyze_with_offset_no_infer(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<SourceAnalyzer> {
         self.analyze_impl(node, Some(offset), false)
     }

@@ -1069,22 +1093,22 @@ impl<'db> SemanticsImpl<'db> {
         node: &SyntaxNode,
         offset: Option<TextSize>,
         infer_body: bool,
-    ) -> SourceAnalyzer {
+    ) -> Option<SourceAnalyzer> {
         let _p = profile::span("Semantics::analyze_impl");
         let node = self.find_file(node);

         let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
             Some(it) => it,
-            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), node),
+            None => return None,
         };

         let resolver = match container {
             ChildContainer::DefWithBodyId(def) => {
-                return if infer_body {
+                return Some(if infer_body {
                     SourceAnalyzer::new_for_body(self.db, def, node, offset)
                 } else {
                     SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
-                }
+                })
             }
             ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
             ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
@@ -1094,7 +1118,7 @@ impl<'db> SemanticsImpl<'db> {
             ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
             ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
         };
-        SourceAnalyzer::new_for_resolver(resolver, node)
+        Some(SourceAnalyzer::new_for_resolver(resolver, node))
     }

     fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
@@ -1118,6 +1142,7 @@ impl<'db> SemanticsImpl<'db> {
         InFile::new(file_id, node)
     }

+    /// Wraps the node in a [`InFile`] with the file id it belongs to.
     fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
         let root_node = find_root(node);
         let file_id = self.lookup(&root_node).unwrap_or_else(|| {
@@ -1319,12 +1344,12 @@ pub struct SemanticsScope<'a> {
 }

 impl<'a> SemanticsScope<'a> {
-    pub fn module(&self) -> Option<Module> {
-        Some(Module { id: self.resolver.module()? })
+    pub fn module(&self) -> Module {
+        Module { id: self.resolver.module() }
     }

-    pub fn krate(&self) -> Option<Crate> {
-        Some(Crate { id: self.resolver.krate()? })
+    pub fn krate(&self) -> Crate {
+        Crate { id: self.resolver.krate() }
     }

     pub(crate) fn resolver(&self) -> &Resolver {
@@ -169,7 +169,7 @@ impl SourceAnalyzer {
             .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
         let ty = infer[expr_id].clone();
         let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
-        mk_ty(ty).zip(Some(coerced.and_then(mk_ty)))
+        Some((mk_ty(ty), coerced.map(mk_ty)))
     }

     pub(crate) fn type_of_pat(
@@ -185,7 +185,7 @@ impl SourceAnalyzer {
             .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
         let ty = infer[pat_id].clone();
         let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
-        mk_ty(ty).zip(Some(coerced.and_then(mk_ty)))
+        Some((mk_ty(ty), coerced.map(mk_ty)))
     }

     pub(crate) fn type_of_self(
@@ -196,7 +196,7 @@ impl SourceAnalyzer {
         let src = InFile { file_id: self.file_id, value: param };
         let pat_id = self.body_source_map()?.node_self_param(src)?;
         let ty = self.infer.as_ref()?[pat_id].clone();
-        Type::new_with_resolver(db, &self.resolver, ty)
+        Some(Type::new_with_resolver(db, &self.resolver, ty))
     }

     pub(crate) fn resolve_method_call(
@@ -244,7 +244,7 @@ impl SourceAnalyzer {
         let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
         let field_ty =
             db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
-        Some((field.into(), local, Type::new_with_resolver(db, &self.resolver, field_ty)?))
+        Some((field.into(), local, Type::new_with_resolver(db, &self.resolver, field_ty)))
     }

     pub(crate) fn resolve_record_pat_field(
@@ -366,10 +366,7 @@ impl SourceAnalyzer {
             return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path) {
                 None if is_path_of_attr => {
                     path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
-                        match self.resolver.krate() {
-                            Some(krate) => ToolModule::by_name(db, krate.into(), &name_ref.text()),
-                            None => ToolModule::builtin(&name_ref.text()),
-                        }
+                        ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
                             .map(PathResolution::ToolModule)
                     })
                 }
@@ -380,9 +377,8 @@ impl SourceAnalyzer {
             // in this case we have to check for inert/builtin attributes and tools and prioritize
             // resolution of attributes over other namespaces
             let name_ref = path.as_single_name_ref();
-            let builtin = name_ref.as_ref().and_then(|name_ref| match self.resolver.krate() {
-                Some(krate) => BuiltinAttr::by_name(db, krate.into(), &name_ref.text()),
-                None => BuiltinAttr::builtin(&name_ref.text()),
+            let builtin = name_ref.as_ref().and_then(|name_ref| {
+                BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text())
             });
             if let builtin @ Some(_) = builtin {
                 return builtin.map(PathResolution::BuiltinAttr);
@@ -392,10 +388,7 @@ impl SourceAnalyzer {
                 // this labels any path that starts with a tool module as the tool itself, this is technically wrong
                 // but there is no benefit in differentiating these two cases for the time being
                 None => path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
-                    match self.resolver.krate() {
-                        Some(krate) => ToolModule::by_name(db, krate.into(), &name_ref.text()),
-                        None => ToolModule::builtin(&name_ref.text()),
-                    }
+                    ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
                         .map(PathResolution::ToolModule)
                 }),
             };
@@ -412,7 +405,7 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         literal: &ast::RecordExpr,
     ) -> Option<Vec<(Field, Type)>> {
-        let krate = self.resolver.krate()?;
+        let krate = self.resolver.krate();
         let body = self.body()?;
         let infer = self.infer.as_ref()?;

@@ -430,7 +423,7 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         pattern: &ast::RecordPat,
     ) -> Option<Vec<(Field, Type)>> {
-        let krate = self.resolver.krate()?;
+        let krate = self.resolver.krate();
         let body = self.body()?;
         let infer = self.infer.as_ref()?;

@@ -468,7 +461,7 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         macro_call: InFile<&ast::MacroCall>,
     ) -> Option<HirFileId> {
-        let krate = self.resolver.krate()?;
+        let krate = self.resolver.krate();
         let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
             self.resolver
                 .resolve_path_as_macro(db.upcast(), &path)
@@ -25,12 +25,14 @@ use crate::{
     StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, VariantId,
 };

-#[derive(Debug, Clone, Default)]
+#[derive(Debug, Clone)]
 pub struct Resolver {
     /// The stack of scopes, where the inner-most scope is the last item.
     ///
     /// When using, you generally want to process the scopes in reverse order,
     /// there's `scopes` *method* for that.
+    ///
+    /// Invariant: There exists at least one Scope::ModuleScope at the start of the vec.
     scopes: Vec<Scope>,
 }

@@ -135,10 +137,7 @@ impl Resolver {
         path: &ModPath,
         shadow: BuiltinShadowMode,
     ) -> PerNs {
-        let (item_map, module) = match self.module_scope() {
-            Some(it) => it,
-            None => return PerNs::none(),
-        };
+        let (item_map, module) = self.module_scope();
         let (module_res, segment_index) = item_map.resolve_path(db, module, path, shadow);
         if segment_index.is_some() {
             return PerNs::none();
@@ -155,7 +154,7 @@ impl Resolver {
         db: &dyn DefDatabase,
         path: &ModPath,
     ) -> Option<PerNs> {
-        let (item_map, module) = self.module_scope()?;
+        let (item_map, module) = self.module_scope();
         let (module_res, idx) = item_map.resolve_path(db, module, path, BuiltinShadowMode::Module);
         match module_res.take_types()? {
             ModuleDefId::TraitId(it) => {
@@ -235,10 +234,7 @@ impl Resolver {
     ) -> Option<Visibility> {
         match visibility {
             RawVisibility::Module(_) => {
-                let (item_map, module) = match self.module_scope() {
-                    Some(it) => it,
-                    None => return None,
-                };
+                let (item_map, module) = self.module_scope();
                 item_map.resolve_visibility(db, module, visibility)
             }
             RawVisibility::Public => Some(Visibility::Public),
@@ -336,7 +332,7 @@ impl Resolver {
     }

     pub fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<MacroId> {
-        let (item_map, module) = self.module_scope()?;
+        let (item_map, module) = self.module_scope();
         item_map.resolve_path(db, module, path, BuiltinShadowMode::Other).0.take_macros()
     }

@@ -425,22 +421,22 @@ impl Resolver {
         traits
     }

-    fn module_scope(&self) -> Option<(&DefMap, LocalModuleId)> {
-        self.scopes().find_map(|scope| match scope {
+    fn module_scope(&self) -> (&DefMap, LocalModuleId) {
+        self.scopes()
+            .find_map(|scope| match scope {
                 Scope::ModuleScope(m) => Some((&*m.def_map, m.module_id)),
                 _ => None,
             })
+            .expect("module scope invariant violated")
     }

-    pub fn module(&self) -> Option<ModuleId> {
-        let (def_map, local_id) = self.module_scope()?;
-        Some(def_map.module_id(local_id))
+    pub fn module(&self) -> ModuleId {
+        let (def_map, local_id) = self.module_scope();
+        def_map.module_id(local_id)
     }

-    pub fn krate(&self) -> Option<CrateId> {
-        // FIXME: can this ever be `None`?
-        self.module_scope().map(|t| t.0.krate())
+    pub fn krate(&self) -> CrateId {
+        self.module_scope().0.krate()
     }

     pub fn where_predicates_in_scope(
@@ -749,8 +745,7 @@ impl HasResolver for ModuleId {
             def_map = parent.def_map(db);
             modules.push((def_map.clone(), parent.local_id));
         }
-        let mut resolver = Resolver::default();
-        resolver.scopes.reserve(modules.len());
+        let mut resolver = Resolver { scopes: Vec::with_capacity(modules.len()) };
         for (def_map, module) in modules.into_iter().rev() {
             resolver = resolver.push_module_scope(def_map, module);
         }
@@ -420,9 +420,8 @@ pub(crate) fn associated_ty_data_query(
         .collect();

     if !ctx.unsized_types.borrow().contains(&self_ty) {
-        let sized_trait = resolver
-            .krate()
-            .and_then(|krate| db.lang_item(krate, SmolStr::new_inline("sized")))
+        let sized_trait = db
+            .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
             .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
         let sized_bound = sized_trait.into_iter().map(|sized_trait| {
             let trait_bound =
@@ -762,7 +762,7 @@ impl<'a> InferenceContext<'a> {
     }

     fn resolve_lang_item(&self, name: Name) -> Option<LangItemTarget> {
-        let krate = self.resolver.krate()?;
+        let krate = self.resolver.krate();
         self.db.lang_item(krate, name.to_smol_str())
     }

@@ -29,7 +29,7 @@ use crate::{
         const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
     },
     mapping::{from_chalk, ToChalk},
-    method_resolution,
+    method_resolution::{self, VisibleFromModule},
     primitive::{self, UintTy},
     static_lifetime, to_chalk_trait_id,
     utils::{generics, Generics},
@@ -487,13 +487,8 @@ impl<'a> InferenceContext<'a> {
             }
             _ => return None,
         };
-        let module = self.resolver.module();
-        let is_visible = module
-            .map(|mod_id| {
-                self.db.field_visibilities(field_id.parent)[field_id.local_id]
-                    .is_visible_from(self.db.upcast(), mod_id)
-            })
-            .unwrap_or(true);
+        let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
+            .is_visible_from(self.db.upcast(), self.resolver.module());
         if !is_visible {
             // Write down the first field resolution even if it is not visible
             // This aids IDE features for private fields like goto def and in
@@ -946,7 +941,7 @@ impl<'a> InferenceContext<'a> {
             self.db,
             self.trait_env.clone(),
             &traits_in_scope,
-            self.resolver.module().into(),
+            VisibleFromModule::Filter(self.resolver.module()),
             method_name,
         );
         let (receiver_ty, method_ty, substs) = match resolved {
@@ -9,8 +9,11 @@ use hir_def::{
 use hir_expand::name::Name;

 use crate::{
-    builder::ParamKind, consteval, method_resolution, GenericArgData, Interner, Substitution,
-    TraitRefExt, Ty, TyBuilder, TyExt, TyKind, ValueTyDefId,
+    builder::ParamKind,
+    consteval,
+    method_resolution::{self, VisibleFromModule},
+    GenericArgData, Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+    ValueTyDefId,
 };

 use super::{ExprOrPatId, InferenceContext, TraitRef};
@@ -231,7 +234,7 @@ impl<'a> InferenceContext<'a> {
             self.db,
             self.table.trait_env.clone(),
             &traits_in_scope,
-            self.resolver.module().into(),
+            VisibleFromModule::Filter(self.resolver.module()),
             Some(name),
             method_resolution::LookupMode::Path,
             move |_ty, item| {
@@ -335,12 +335,13 @@ impl<'a> TyLoweringContext<'a> {
                     let mut expander = self.expander.borrow_mut();
                     if expander.is_some() {
                         (Some(expander), false)
-                    } else if let Some(module_id) = self.resolver.module() {
-                        *expander =
-                            Some(Expander::new(self.db.upcast(), macro_call.file_id, module_id));
-                        (Some(expander), true)
                     } else {
-                        (None, false)
+                        *expander = Some(Expander::new(
+                            self.db.upcast(),
+                            macro_call.file_id,
+                            self.resolver.module(),
+                        ));
+                        (Some(expander), true)
                     }
                 };
                 let ty = if let Some(mut expander) = expander {
@@ -860,9 +861,8 @@ impl<'a> TyLoweringContext<'a> {
             }
             TypeBound::Path(path, TraitBoundModifier::Maybe) => {
                 let sized_trait = self
-                    .resolver
-                    .krate()
-                    .and_then(|krate| self.db.lang_item(krate, SmolStr::new_inline("sized")))
+                    .db
+                    .lang_item(self.resolver.krate(), SmolStr::new_inline("sized"))
                     .and_then(|lang_item| lang_item.as_trait());
                 // Don't lower associated type bindings as the only possible relaxed trait bound
                 // `?Sized` has no of them.
@@ -1268,9 +1268,8 @@ fn implicitly_sized_clauses<'a>(
 ) -> impl Iterator<Item = WhereClause> + 'a {
     let is_trait_def = matches!(def, GenericDefId::TraitId(..));
     let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
-    let sized_trait = resolver
-        .krate()
-        .and_then(|krate| db.lang_item(krate, SmolStr::new_inline("sized")))
+    let sized_trait = db
+        .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
         .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));

     sized_trait.into_iter().flat_map(move |sized_trait| {
@@ -170,7 +170,7 @@ pub(crate) fn hover_for_definition(
     config: &HoverConfig,
 ) -> Option<HoverResult> {
     let famous_defs = match &definition {
-        Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node).krate())),
+        Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())),
         _ => None,
     };
     if let Some(markup) = render::definition(sema.db, definition, famous_defs.as_ref(), config) {
@@ -103,7 +103,7 @@ pub(super) fn try_expr(

     let adts = inner_ty.as_adt().zip(body_ty.as_adt());
     if let Some((hir::Adt::Enum(inner), hir::Adt::Enum(body))) = adts {
-        let famous_defs = FamousDefs(sema, sema.scope(try_expr.syntax()).krate());
+        let famous_defs = FamousDefs(sema, sema.scope(try_expr.syntax())?.krate());
         // special case for two options, there is no value in showing them
         if let Some(option_enum) = famous_defs.core_option_Option() {
             if inner == option_enum && body == option_enum {
@@ -234,7 +234,7 @@ pub(super) fn keyword(
         return None;
     }
     let parent = token.parent()?;
-    let famous_defs = FamousDefs(sema, sema.scope(&parent).krate());
+    let famous_defs = FamousDefs(sema, sema.scope(&parent)?.krate());

     let KeywordHint { description, keyword_mod, actions } = keyword_hints(sema, token, parent);

@@ -110,7 +110,10 @@ fn hints(
     config: &InlayHintsConfig,
     node: SyntaxNode,
 ) {
-    let krate = sema.scope(&node).module().map(|it| it.krate());
+    let krate = match sema.scope(&node) {
+        Some(it) => it.krate(),
+        None => return,
+    };
     let famous_defs = FamousDefs(sema, krate);
     if let Some(expr) = ast::Expr::cast(node.clone()) {
         chaining_hints(hints, sema, &famous_defs, config, &expr);
@@ -503,7 +506,7 @@ fn bind_pat_hints(
         return None;
     }

-    let krate = sema.scope(desc_pat.syntax()).module().map(|it| it.krate());
+    let krate = sema.scope(desc_pat.syntax())?.krate();
     let famous_defs = FamousDefs(sema, krate);
     let label = hint_iterator(sema, &famous_defs, config, &ty);

@@ -635,7 +635,7 @@ impl Analysis {
         self.with_db(|db| {
             let rule: ide_ssr::SsrRule = query.parse()?;
             let mut match_finder =
-                ide_ssr::MatchFinder::in_context(db, resolve_context, selections);
+                ide_ssr::MatchFinder::in_context(db, resolve_context, selections)?;
             match_finder.add_rule(rule)?;
             let edits = if parse_only { Default::default() } else { match_finder.edits() };
             Ok(SourceChange::from(edits))
@@ -179,12 +179,16 @@ pub(crate) fn highlight(
     };

     let mut hl = highlights::Highlights::new(root.text_range());
+    let krate = match sema.scope(&root) {
+        Some(it) => it.krate(),
+        None => return hl.to_vec(),
+    };
     traverse(
         &mut hl,
         &sema,
         file_id,
         &root,
-        sema.scope(&root).krate(),
+        krate,
         range_to_highlight,
         syntactic_name_ref_highlighting,
     );
@@ -196,7 +200,7 @@ fn traverse(
     sema: &Semantics<RootDatabase>,
     file_id: FileId,
     root: &SyntaxNode,
-    krate: Option<hir::Crate>,
+    krate: hir::Crate,
     range_to_highlight: TextRange,
     syntactic_name_ref_highlighting: bool,
 ) {
@@ -48,7 +48,7 @@ pub(super) fn token(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Optio

 pub(super) fn name_like(
     sema: &Semantics<RootDatabase>,
-    krate: Option<hir::Crate>,
+    krate: hir::Crate,
     bindings_shadow_count: &mut FxHashMap<hir::Name, u32>,
     syntactic_name_ref_highlighting: bool,
     name_like: ast::NameLike,
@@ -192,7 +192,7 @@ fn keyword(

 fn highlight_name_ref(
     sema: &Semantics<RootDatabase>,
-    krate: Option<hir::Crate>,
+    krate: hir::Crate,
     bindings_shadow_count: &mut FxHashMap<hir::Name, u32>,
     binding_hash: &mut Option<u64>,
     syntactic_name_ref_highlighting: bool,
@@ -278,7 +278,7 @@ fn highlight_name(
     sema: &Semantics<RootDatabase>,
     bindings_shadow_count: &mut FxHashMap<hir::Name, u32>,
     binding_hash: &mut Option<u64>,
-    krate: Option<hir::Crate>,
+    krate: hir::Crate,
     name: ast::Name,
 ) -> Highlight {
     let name_kind = NameClass::classify(sema, &name);
@@ -322,11 +322,7 @@ fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 {
     hash((name, shadow_count))
 }

-fn highlight_def(
-    sema: &Semantics<RootDatabase>,
-    krate: Option<hir::Crate>,
-    def: Definition,
-) -> Highlight {
+fn highlight_def(sema: &Semantics<RootDatabase>, krate: hir::Crate, def: Definition) -> Highlight {
     let db = sema.db;
     let mut h = match def {
         Definition::Macro(m) => Highlight::new(HlTag::Symbol(m.kind(sema.db).into())),
@@ -475,7 +471,7 @@ fn highlight_def(
         Definition::Module(module) => Some(module.krate()),
         _ => None,
     });
-    let is_from_other_crate = def_crate != krate;
+    let is_from_other_crate = def_crate != Some(krate);
     let is_from_builtin_crate = def_crate.map_or(false, |def_crate| def_crate.is_builtin(db));
     let is_builtin_type = matches!(def, Definition::BuiltinType(_));
     let is_public = def.visibility(db) == Some(hir::Visibility::Public);
@@ -495,7 +491,7 @@ fn highlight_def(

 fn highlight_method_call_by_name_ref(
     sema: &Semantics<RootDatabase>,
-    krate: Option<hir::Crate>,
+    krate: hir::Crate,
     name_ref: &ast::NameRef,
 ) -> Option<Highlight> {
     let mc = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast)?;
@@ -504,7 +500,7 @@ fn highlight_method_call_by_name_ref(

 fn highlight_method_call(
     sema: &Semantics<RootDatabase>,
-    krate: Option<hir::Crate>,
+    krate: hir::Crate,
     method_call: &ast::MethodCallExpr,
 ) -> Option<Highlight> {
     let func = sema.resolve_method_call(method_call)?;
@@ -523,7 +519,7 @@ fn highlight_method_call(
     }

     let def_crate = func.module(sema.db).krate();
-    let is_from_other_crate = Some(def_crate) != krate;
+    let is_from_other_crate = def_crate != krate;
     let is_from_builtin_crate = def_crate.is_builtin(sema.db);
     let is_public = func.visibility(sema.db) == hir::Visibility::Public;

@@ -589,7 +585,7 @@ fn highlight_name_by_syntax(name: ast::Name) -> Highlight {
 fn highlight_name_ref_by_syntax(
     name: ast::NameRef,
     sema: &Semantics<RootDatabase>,
-    krate: Option<hir::Crate>,
+    krate: hir::Crate,
 ) -> Highlight {
     let default = HlTag::UnresolvedReference;

@@ -41,7 +41,7 @@ pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext) -> Optio
         return None;
     };

-    let module = ctx.sema.scope(pat.syntax()).module()?;
+    let module = ctx.sema.scope(pat.syntax())?.module();
     let pat_range = pat.syntax().text_range();

     // Don't enable the assist if there is a type ascription without any placeholders
@@ -104,6 +104,7 @@ fn add_missing_impl_members_inner(
 ) -> Option<()> {
     let _p = profile::span("add_missing_impl_members_inner");
     let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
+    let target_scope = ctx.sema.scope(impl_def.syntax())?;
     let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?;

     let missing_items = filter_assoc_items(
@@ -118,7 +119,6 @@ fn add_missing_impl_members_inner(

     let target = impl_def.syntax().text_range();
     acc.add(AssistId(assist_id, AssistKind::QuickFix), label, target, |builder| {
-        let target_scope = ctx.sema.scope(impl_def.syntax());
         let missing_items = missing_items
             .into_iter()
             .map(|it| {
@@ -73,7 +73,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) ->
         .filter(|pat| !matches!(pat, Pat::WildcardPat(_)))
         .collect();

-    let module = ctx.sema.scope(expr.syntax()).module()?;
+    let module = ctx.sema.scope(expr.syntax())?.module();
     let (mut missing_pats, is_non_exhaustive): (
         Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,
         bool,
@@ -92,8 +92,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) ->
             })
             .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));

-        let option_enum =
-            FamousDefs(&ctx.sema, Some(module.krate())).core_option_Option().map(lift_enum);
+        let option_enum = FamousDefs(&ctx.sema, module.krate()).core_option_Option().map(lift_enum);
         let missing_pats: Box<dyn Iterator<Item = _>> = if Some(enum_def) == option_enum {
             // Match `Some` variant first.
             cov_mark::hit!(option_order);
@@ -17,7 +17,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 // ```
 pub(crate) fn add_return_type(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
     let (fn_type, tail_expr, builder_edit_pos) = extract_tail(ctx)?;
-    let module = ctx.sema.scope(tail_expr.syntax()).module()?;
+    let module = ctx.sema.scope(tail_expr.syntax())?.module();
     let ty = ctx.sema.type_of_expr(&peel_blocks(tail_expr.clone()))?.original();
     if ty.is_unit() {
         return None;
@@ -222,7 +222,7 @@ fn option_variants(
     sema: &Semantics<RootDatabase>,
     expr: &SyntaxNode,
 ) -> Option<(hir::Variant, hir::Variant)> {
-    let fam = FamousDefs(sema, sema.scope(expr).krate());
+    let fam = FamousDefs(sema, sema.scope(expr)?.krate());
     let option_variants = fam.core_option_Option()?.variants(sema.db);
     match &*option_variants {
         &[variant0, variant1] => Some(if variant0.name(sema.db) == known::None {
@@ -36,10 +36,10 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext) -> Op
     let src_type = impl_.self_ty()?;
     let ast_trait = impl_.trait_()?;

-    let module = ctx.sema.scope(impl_.syntax()).module()?;
+    let module = ctx.sema.scope(impl_.syntax())?.module();

     let trait_ = resolve_target_trait(&ctx.sema, &impl_)?;
-    if trait_ != FamousDefs(&ctx.sema, Some(module.krate())).core_convert_Into()? {
+    if trait_ != FamousDefs(&ctx.sema, module.krate()).core_convert_Into()? {
         return None;
     }

@@ -146,10 +146,10 @@ fn is_ref_and_impls_iter_method(
     let wanted_method = if ref_expr.mut_token().is_some() { known::iter_mut } else { known::iter };
     let expr_behind_ref = ref_expr.expr()?;
     let ty = sema.type_of_expr(&expr_behind_ref)?.adjusted();
-    let scope = sema.scope(iterable.syntax());
-    let krate = scope.module()?.krate();
+    let scope = sema.scope(iterable.syntax())?;
+    let krate = scope.krate();
     let traits_in_scope = scope.visible_traits();
-    let iter_trait = FamousDefs(sema, Some(krate)).core_iter_Iterator()?;
+    let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;

     let has_wanted_method = ty
         .iterate_method_candidates(
@@ -175,24 +175,17 @@ fn is_ref_and_impls_iter_method(

 /// Whether iterable implements core::Iterator
 fn impls_core_iter(sema: &hir::Semantics<ide_db::RootDatabase>, iterable: &ast::Expr) -> bool {
-    let it_typ = match sema.type_of_expr(iterable) {
-        Some(it) => it.adjusted(),
-        None => return false,
-    };
+    (|| {
+        let it_typ = sema.type_of_expr(iterable)?.adjusted();

-    let module = match sema.scope(iterable.syntax()).module() {
-        Some(it) => it,
-        None => return false,
-    };
+        let module = sema.scope(iterable.syntax())?.module();

         let krate = module.krate();
-    match FamousDefs(sema, Some(krate)).core_iter_Iterator() {
-        Some(iter_trait) => {
+        let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
         cov_mark::hit!(test_already_impls_iterator);
-            it_typ.impls_trait(sema.db, iter_trait, &[])
-        }
-        None => false,
-    }
+        Some(it_typ.impls_trait(sema.db, iter_trait, &[]))
+    })()
+    .unwrap_or(false)
 }

 fn validate_method_call_expr(
@@ -214,10 +207,10 @@ fn validate_method_call_expr(
     let expr = ast::Expr::MethodCallExpr(expr);

     let it_type = sema.type_of_expr(&receiver)?.adjusted();
-    let module = sema.scope(receiver.syntax()).module()?;
+    let module = sema.scope(receiver.syntax())?.module();
     let krate = module.krate();

-    let iter_trait = FamousDefs(sema, Some(krate)).core_iter_Iterator()?;
+    let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
     it_type.impls_trait(sema.db, iter_trait, &[]).then(|| (expr, receiver))
 }

@@ -49,8 +49,8 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Opti
         _ => return None,
     };

-    let current_scope = ctx.sema.scope(&star.parent()?);
-    let current_module = current_scope.module()?;
+    let current_scope = ctx.sema.scope(&star.parent()?)?;
+    let current_module = current_scope.module();

     let refs_in_target = find_refs_in_mod(ctx, target_module, Some(current_module))?;
     let imported_defs = find_imported_defs(ctx, star)?;
@@ -81,7 +81,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext) -> Option

     let anchor = if self_param.is_some() { Anchor::Method } else { Anchor::Freestanding };
     let insert_after = node_to_insert_after(&body, anchor)?;
-    let module = ctx.sema.scope(&insert_after).module()?;
+    let module = ctx.sema.scope(&insert_after)?.module();

     let ret_ty = body.return_ty(ctx)?;
     let control_flow = body.external_control_flow(ctx, &container_info)?;
@@ -132,7 +132,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext) -> Option
             };

             let control_flow_enum =
-                FamousDefs(&ctx.sema, Some(module.krate())).core_ops_ControlFlow();
+                FamousDefs(&ctx.sema, module.krate()).core_ops_ControlFlow();

             if let Some(control_flow_enum) = control_flow_enum {
                 let mod_path = module.find_use_path_prefixed(
@@ -423,7 +423,7 @@ fn reference_to_node(
             _ => return None,
         }
     };
-    let module = sema.scope(&expr_or_pat).module()?;
+    let module = sema.scope(&expr_or_pat)?.module();
     Some((segment, expr_or_pat, module))
 }

@@ -43,7 +43,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext) -> O
         _ => return None,
     };

-    let current_module = ctx.sema.scope(path.syntax()).module()?;
+    let current_module = ctx.sema.scope(path.syntax())?.module();
     let target_module = def.module(ctx.db())?;

     if def.visibility(ctx.db()).is_visible_from(ctx.db(), current_module.into()) {
@@ -86,7 +86,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext) ->
     let record_field: ast::RecordExprField = ctx.find_node_at_offset()?;
     let (record_field_def, _, _) = ctx.sema.resolve_record_field(&record_field)?;

-    let current_module = ctx.sema.scope(record_field.syntax()).module()?;
+    let current_module = ctx.sema.scope(record_field.syntax())?.module();
     let visibility = record_field_def.visibility(ctx.db());
     if visibility.is_visible_from(ctx.db(), current_module.into()) {
         return None;
@@ -35,8 +35,8 @@ pub(crate) fn generate_constant(acc: &mut Assists, ctx: &AssistContext) -> Optio
     let expr = constant_token.syntax().ancestors().find_map(ast::Expr::cast)?;
     let statement = expr.syntax().ancestors().find_map(ast::Stmt::cast)?;
     let ty = ctx.sema.type_of_expr(&expr)?;
-    let scope = ctx.sema.scope(statement.syntax());
-    let module = scope.module()?;
+    let scope = ctx.sema.scope(statement.syntax())?;
+    let module = scope.module();
     let type_name = ty.original().display_source_code(ctx.db(), module.into()).ok()?;
     let indent = IndentLevel::from_node(statement.syntax());
     if constant_token.to_string().chars().any(|it| !(it.is_uppercase() || it == '_')) {
@@ -75,7 +75,7 @@ fn existing_default_impl(
     let enum_ = variant.parent_enum(sema.db);
     let krate = enum_.module(sema.db).krate();

-    let default_trait = FamousDefs(sema, Some(krate)).core_default_Default()?;
+    let default_trait = FamousDefs(sema, krate).core_default_Default()?;
     let enum_type = enum_.ty(sema.db);

     if enum_type.impls_trait(sema.db, default_trait, &[]) {
@@ -132,7 +132,7 @@ fn is_default_implemented(ctx: &AssistContext, impl_: &Impl) -> bool {

     let ty = impl_def.self_ty(db);
     let krate = impl_def.module(db).krate();
-    let default = FamousDefs(&ctx.sema, Some(krate)).core_default_Default();
+    let default = FamousDefs(&ctx.sema, krate).core_default_Default();
     let default_trait = match default {
         Some(value) => value,
         None => return false,
@@ -45,7 +45,7 @@ use syntax::ast::edit::AstNodeEdit;
 pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
     let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
     let strukt_name = strukt.name()?;
-    let current_module = ctx.sema.scope(strukt.syntax()).module()?;
+    let current_module = ctx.sema.scope(strukt.syntax())?.module();

     let (field_name, field_ty, target) = match ctx.find_node_at_offset::<ast::RecordField>() {
         Some(field) => {
@@ -110,7 +110,7 @@ fn existing_deref_impl(
     let strukt = sema.to_def(strukt)?;
     let krate = strukt.module(sema.db).krate();

-    let deref_trait = FamousDefs(sema, Some(krate)).core_ops_Deref()?;
+    let deref_trait = FamousDefs(sema, krate).core_ops_Deref()?;
     let strukt_type = strukt.ty(sema.db);

     if strukt_type.impls_trait(sema.db, deref_trait, &[]) {
@@ -202,7 +202,7 @@ fn all_parent_mods_public(hir_func: &hir::Function, ctx: &AssistContext) -> bool

 /// Returns the name of the current crate
 fn crate_name(ast_func: &ast::Fn, ctx: &AssistContext) -> Option<String> {
-    let krate = ctx.sema.scope(ast_func.syntax()).module()?.krate();
+    let krate = ctx.sema.scope(ast_func.syntax())?.krate();
     Some(krate.display_name(ctx.db())?.to_string())
 }

@@ -86,7 +86,7 @@ fn existing_from_impl(
     let enum_ = variant.parent_enum(sema.db);
     let krate = enum_.module(sema.db).krate();

-    let from_trait = FamousDefs(sema, Some(krate)).core_convert_From()?;
+    let from_trait = FamousDefs(sema, krate).core_convert_From()?;

     let enum_type = enum_.ty(sema.db);

@@ -73,7 +73,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
             get_fn_target(ctx, &target_module, call.clone())?
         }
         Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) => {
-            let current_module = current_module(call.syntax(), ctx)?;
+            let current_module = ctx.sema.scope(call.syntax())?.module();
             let module = adt.module(ctx.sema.db);
             target_module = if current_module == module { None } else { Some(module) };
             if current_module.krate() != module.krate() {
@@ -117,7 +117,7 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
     let fn_name = call.name_ref()?;
     let adt = ctx.sema.type_of_expr(&call.receiver()?)?.original().strip_references().as_adt()?;

-    let current_module = current_module(call.syntax(), ctx)?;
+    let current_module = ctx.sema.scope(call.syntax())?.module();
     let target_module = adt.module(ctx.sema.db);

     if current_module.krate() != target_module.krate() {
@@ -166,10 +166,6 @@ fn add_func_to_accumulator(
     })
 }

-fn current_module(current_node: &SyntaxNode, ctx: &AssistContext) -> Option<Module> {
-    ctx.sema.scope(current_node).module()
-}
-
 fn get_adt_source(
     ctx: &AssistContext,
     adt: &hir::Adt,
@@ -235,7 +231,8 @@ impl FunctionBuilder {
         target: GeneratedFunctionTarget,
     ) -> Option<Self> {
         let needs_pub = target_module.is_some();
-        let target_module = target_module.or_else(|| current_module(target.syntax(), ctx))?;
+        let target_module =
+            target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?;
         let fn_name = make::name(fn_name);
         let (type_params, params) =
             fn_args(ctx, target_module, ast::CallableExpr::Call(call.clone()))?;
@@ -266,7 +263,7 @@ impl FunctionBuilder {
         target: GeneratedFunctionTarget,
     ) -> Option<Self> {
         let needs_pub =
-            !module_is_descendant(&current_module(call.syntax(), ctx)?, &target_module, ctx);
+            !module_is_descendant(&ctx.sema.scope(call.syntax())?.module(), &target_module, ctx);
         let fn_name = make::name(&name.text());
         let (type_params, params) =
             fn_args(ctx, target_module, ast::CallableExpr::MethodCall(call.clone()))?;
@@ -520,7 +517,7 @@ fn fn_arg_type(ctx: &AssistContext, target_module: hir::Module, fn_arg: &ast::Ex
     }

     if ty.is_reference() || ty.is_mutable_reference() {
-        let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax()).krate());
+        let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate());
         convert_reference_type(ty.strip_references(), ctx.db(), famous_defs)
             .map(|conversion| conversion.convert_type(ctx.db()))
             .or_else(|| ty.display_source_code(ctx.db(), target_module.into()).ok())
@@ -121,7 +121,9 @@ pub(crate) fn generate_getter_impl(
                 "a mutable reference to ",
             )
         } else {
-            let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(field_ty.syntax()).krate());
+            (|| {
+                let krate = ctx.sema.scope(field_ty.syntax())?.krate();
+                let famous_defs = &FamousDefs(&ctx.sema, krate);
                 ctx.sema
                     .resolve_type(&field_ty)
                     .and_then(|ty| convert_reference_type(ty, ctx.db(), famous_defs))
@@ -133,12 +135,9 @@ pub(crate) fn generate_getter_impl(
                             if conversion.is_copy() { "" } else { "a reference to " },
                         )
                     })
+            })()
             .unwrap_or_else(|| {
-                (
-                    format!("&{}", field_ty),
-                    format!("&self.{}", field_name),
-                    "a reference to ",
-                )
+                (format!("&{}", field_ty), format!("&self.{}", field_name), "a reference to ")
             })
         };

@@ -93,7 +93,7 @@ fn get_impl_method(
     let db = ctx.sema.db;
     let impl_def: hir::Impl = ctx.sema.to_def(impl_)?;

-    let scope = ctx.sema.scope(impl_.syntax());
+    let scope = ctx.sema.scope(impl_.syntax())?;
     let ty = impl_def.self_ty(db);
     let traits_in_scope = scope.visible_traits();
     ty.iterate_method_candidates(db, &scope, &traits_in_scope, None, Some(fn_name), |func| {
@@ -391,14 +391,12 @@ fn inline(
         }
     }
     if let Some(generic_arg_list) = generic_arg_list.clone() {
-        PathTransform::function_call(
-            &sema.scope(node.syntax()),
-            &sema.scope(fn_body.syntax()),
-            function,
-            generic_arg_list,
-        )
+        if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax()))
+        {
+            PathTransform::function_call(target, source, function, generic_arg_list)
                 .apply(body.syntax());
+        }
     }

     let original_indentation = match node {
         ast::CallableExpr::Call(it) => it.indent_level(),
@@ -53,7 +53,7 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext) ->
     }
     let let_stmt = pat.syntax().parent().and_then(ast::LetStmt::cast)?;

-    let module = ctx.sema.scope(pat.syntax()).module()?;
+    let module = ctx.sema.scope(pat.syntax())?.module();
     let local = ctx.sema.to_def(&pat)?;
     let ty = ctx.sema.type_of_pat(&pat.into())?.original;

@@ -44,7 +44,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext) -> Opt
     let range = call.syntax().text_range();
     let resolved_call = ctx.sema.resolve_method_call(&call)?;

-    let current_module = ctx.sema.scope(call.syntax()).module()?;
+    let current_module = ctx.sema.scope(call.syntax())?.module();
     let target_module_def = ModuleDef::from(resolved_call);
     let item_in_ns = ItemInNs::from(target_module_def);
     let receiver_path = current_module
@@ -69,7 +69,7 @@ pub(crate) fn replace_derive_with_manual_impl(
     let attr = ast::Attr::cast(value)?;
     let args = attr.token_tree()?;

-    let current_module = ctx.sema.scope(adt.syntax()).module()?;
+    let current_module = ctx.sema.scope(adt.syntax())?.module();
     let current_crate = current_module.krate();

     let found_traits = items_locator::items_with_name(
@@ -177,7 +177,7 @@ fn impl_def_from_trait(
     trait_path: &ast::Path,
 ) -> Option<(ast::Impl, ast::AssocItem)> {
     let trait_ = trait_?;
-    let target_scope = sema.scope(annotated_name.syntax());
+    let target_scope = sema.scope(annotated_name.syntax())?;
     let trait_items = filter_assoc_items(sema, &trait_.items(sema.db), DefaultMethods::No);
     if trait_items.is_empty() {
         return None;
@@ -63,9 +63,11 @@ pub(crate) fn replace_qualified_name_with_use(
     );
     let path_to_qualifier = starts_with_name_ref
         .then(|| {
-            ctx.sema.scope(path.syntax()).module().and_then(|m| {
-                m.find_use_path_prefixed(ctx.sema.db, module, ctx.config.insert_use.prefix_kind)
-            })
+            ctx.sema.scope(path.syntax())?.module().find_use_path_prefixed(
+                ctx.sema.db,
+                module,
+                ctx.config.insert_use.prefix_kind,
+            )
         })
         .flatten();

@@ -38,9 +38,9 @@ pub(crate) fn unwrap_result_return_type(acc: &mut Assists, ctx: &AssistContext)
     };

     let type_ref = &ret_type.ty()?;
-    let ty = ctx.sema.resolve_type(type_ref).and_then(|ty| ty.as_adt());
+    let ty = ctx.sema.resolve_type(type_ref)?.as_adt();
     let result_enum =
-        FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax()).krate()).core_result_Result()?;
+        FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate()).core_result_Result()?;

     if !matches!(ty, Some(hir::Adt::Enum(ret_type)) if ret_type == result_enum) {
         return None;
@@ -39,9 +39,9 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext)
     };

     let type_ref = &ret_type.ty()?;
-    let ty = ctx.sema.resolve_type(type_ref).and_then(|ty| ty.as_adt());
+    let ty = ctx.sema.resolve_type(type_ref)?.as_adt();
     let result_enum =
-        FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax()).krate()).core_result_Result()?;
+        FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate()).core_result_Result()?;

     if matches!(ty, Some(hir::Adt::Enum(ret_type)) if ret_type == result_enum) {
         cov_mark::hit!(wrap_return_type_in_result_simple_return_type_already_result);
@@ -352,14 +352,6 @@ fn enum_variants_with_paths(
 ) {
     let variants = enum_.variants(ctx.db);

-    let module = if let Some(module) = ctx.module {
-        // Compute path from the completion site if available.
-        module
-    } else {
-        // Otherwise fall back to the enum's definition site.
-        enum_.module(ctx.db)
-    };
-
     if let Some(impl_) = ctx.impl_def.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) {
         if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) {
             for &variant in &variants {
@ -373,7 +365,7 @@ fn enum_variants_with_paths(
|
|||
}
|
||||
|
||||
for variant in variants {
|
||||
if let Some(path) = module.find_use_path(ctx.db, hir::ModuleDef::from(variant)) {
|
||||
if let Some(path) = ctx.module.find_use_path(ctx.db, hir::ModuleDef::from(variant)) {
|
||||
// Variants with trivial paths are already added by the existing completion logic,
|
||||
// so we should avoid adding these twice
|
||||
if path.segments().len() > 1 {
|
||||
|
|
|
@@ -91,7 +91,7 @@ pub(crate) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext)
_ => return,
};

-for (name, def) in module.scope(ctx.db, ctx.module) {
+for (name, def) in module.scope(ctx.db, Some(ctx.module)) {
if let Some(def) = module_or_attr(ctx.db, def) {
acc.add_resolution(ctx, name, def);
}

@@ -27,26 +27,17 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext) {
Some("target_os") => KNOWN_OS.iter().copied().for_each(add_completion),
Some("target_vendor") => KNOWN_VENDOR.iter().copied().for_each(add_completion),
Some("target_endian") => ["little", "big"].into_iter().for_each(add_completion),
-Some(name) => {
-if let Some(krate) = ctx.krate {
-krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| {
+Some(name) => ctx.krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| {
let insert_text = format!(r#""{}""#, s);
-let mut item =
-CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s);
+let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s);
item.insert_text(insert_text);

acc.add(item.build());
-})
-};
-}
-None => {
-if let Some(krate) = ctx.krate {
-krate.potential_cfg(ctx.db).get_cfg_keys().cloned().for_each(|s| {
+}),
+None => ctx.krate.potential_cfg(ctx.db).get_cfg_keys().cloned().for_each(|s| {
let item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s);
acc.add(item.build());
-})
-}
-}
+}),
};
}

@@ -34,7 +34,7 @@ pub(crate) fn complete_derive(acc: &mut Completions, ctx: &CompletionContext) {
_ => return,
};

-for (name, def) in module.scope(ctx.db, ctx.module) {
+for (name, def) in module.scope(ctx.db, Some(ctx.module)) {
let add_def = match def {
ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => {
!ctx.existing_derives.contains(&mac) && mac.is_derive(ctx.db)

@@ -87,7 +87,7 @@ fn complete_methods(
ctx.db,
&ctx.scope,
&traits_in_scope,
-ctx.module,
+Some(ctx.module),
None,
|func| {
if func.self_param(ctx.db).is_some() && seen_methods.insert(func.name(ctx.db)) {

@@ -225,7 +225,7 @@ pub(crate) fn position_for_import(
}

fn import_assets(ctx: &CompletionContext, fuzzy_name: String) -> Option<ImportAssets> {
-let current_module = ctx.module?;
+let current_module = ctx.module;
if let Some(dot_receiver) = ctx.dot_receiver() {
ImportAssets::for_fuzzy_method_call(
current_module,

@@ -120,19 +120,19 @@ fn params_from_stmt_list_scope(
Some(it) => it,
None => return,
};
-let scope = ctx.sema.scope_at_offset(stmt_list.syntax(), syntax_node.text_range().end());
-let module = match scope.module() {
-Some(it) => it,
-None => return,
-};
+if let Some(scope) =
+ctx.sema.scope_at_offset(stmt_list.syntax(), syntax_node.text_range().end())
+{
+let module = scope.module().into();
scope.process_all_names(&mut |name, def| {
if let hir::ScopeDef::Local(local) = def {
-if let Ok(ty) = local.ty(ctx.db).display_source_code(ctx.db, module.into()) {
+if let Ok(ty) = local.ty(ctx.db).display_source_code(ctx.db, module) {
cb(name, ty);
}
}
});
+}
}

fn remove_duplicated(
file_params: &mut FxHashMap<String, String>,

@@ -22,7 +22,7 @@ pub(crate) fn complete_mod(acc: &mut Completions, ctx: &CompletionContext) -> Op

let _p = profile::span("completion::complete_mod");

-let current_module = ctx.module?;
+let current_module = ctx.module;

let module_definition_file =
current_module.definition_source(ctx.db).file_id.original_file(ctx.db);

@@ -122,7 +122,7 @@ fn pattern_path_completion(

match resolution {
hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
-let module_scope = module.scope(ctx.db, ctx.module);
+let module_scope = module.scope(ctx.db, Some(ctx.module));
for (name, def) in module_scope {
let add_resolution = match def {
ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => {

@@ -169,7 +169,7 @@ fn pattern_path_completion(
ctx.db,
&ctx.scope,
&traits_in_scope,
-ctx.module,
+Some(ctx.module),
None,
|item| {
match item {

@@ -57,7 +57,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
match ctx.completion_location {
Some(ImmediateLocation::ItemList | ImmediateLocation::Trait | ImmediateLocation::Impl) => {
if let hir::PathResolution::Def(hir::ModuleDef::Module(module)) = resolution {
-for (name, def) in module.scope(ctx.db, ctx.module) {
+for (name, def) in module.scope(ctx.db, Some(ctx.module)) {
if let Some(def) = module_or_fn_macro(ctx.db, def) {
acc.add_resolution(ctx, name, def);
}

@@ -89,7 +89,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon

match resolution {
hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
-let module_scope = module.scope(ctx.db, ctx.module);
+let module_scope = module.scope(ctx.db, Some(ctx.module));
for (name, def) in module_scope {
let add_resolution = match def {
// Don't suggest attribute macros and derives.

@@ -141,14 +141,12 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
// XXX: For parity with Rust bug #22519, this does not complete Ty::AssocType.
// (where AssocType is defined on a trait, not an inherent impl)

-let krate = ctx.krate;
-if let Some(krate) = krate {
let traits_in_scope = traits_in_scope(ctx);
ty.iterate_path_candidates(
ctx.db,
&ctx.scope,
&traits_in_scope,
-ctx.module,
+Some(ctx.module),
None,
|item| {
add_assoc_item(acc, ctx, item);

@@ -157,14 +155,13 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
);

// Iterate assoc types separately
-ty.iterate_assoc_items(ctx.db, krate, |item| {
+ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
if let hir::AssocItem::TypeAlias(ty) = item {
acc.add_type_alias(ctx, ty)
}
None::<()>
});
-}
}
hir::PathResolution::Def(hir::ModuleDef::Trait(t)) => {
// Handles `Trait::assoc` as well as `<Ty as Trait>::assoc`.
for item in t.items(ctx.db) {

@@ -187,7 +184,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
ctx.db,
&ctx.scope,
&traits_in_scope,
-ctx.module,
+Some(ctx.module),
None,
|item| {
// We might iterate candidates of a trait multiple times here, so deduplicate

@@ -83,17 +83,18 @@ pub(crate) fn complete_record_literal(

match ctx.expected_type.as_ref()?.as_adt()? {
hir::Adt::Struct(strukt) if ctx.path_qual().is_none() => {
-let module = if let Some(module) = ctx.module { module } else { strukt.module(ctx.db) };
-let path = module
+let path = ctx
+.module
.find_use_path(ctx.db, hir::ModuleDef::from(strukt))
.filter(|it| it.len() > 1);

acc.add_struct_literal(ctx, strukt, path, None);
}
hir::Adt::Union(un) if ctx.path_qual().is_none() => {
-let module = if let Some(module) = ctx.module { module } else { un.module(ctx.db) };
-let path =
-module.find_use_path(ctx.db, hir::ModuleDef::from(un)).filter(|it| it.len() > 1);
+let path = ctx
+.module
+.find_use_path(ctx.db, hir::ModuleDef::from(un))
+.filter(|it| it.len() > 1);

acc.add_union_literal(ctx, un, path, None);
}

@@ -183,7 +183,7 @@ fn get_transformed_assoc_item(
let assoc_item = assoc_item.clone_for_update();
let trait_ = impl_def.trait_(ctx.db)?;
let source_scope = &ctx.sema.scope_for_def(trait_);
-let target_scope = &ctx.sema.scope(ctx.sema.source(impl_def)?.syntax().value);
+let target_scope = &ctx.sema.scope(ctx.sema.source(impl_def)?.syntax().value)?;
let transform = PathTransform::trait_impl(
target_scope,
source_scope,

@@ -41,7 +41,7 @@ pub(crate) fn complete_use_tree(acc: &mut Completions, ctx: &CompletionContext)

match resolution {
hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
-let module_scope = module.scope(ctx.db, ctx.module);
+let module_scope = module.scope(ctx.db, Some(ctx.module));
let unknown_is_current = |name: &hir::Name| {
matches!(
ctx.name_syntax.as_ref(),

@@ -22,8 +22,8 @@ pub(crate) fn complete_vis(acc: &mut Completions, ctx: &CompletionContext) {
Some(PathQualifierCtx { resolution, is_super_chain, .. }) => {
// Try completing next child module of the path that is still a parent of the current module
if let Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) = resolution {
-if let Some(current_module) = ctx.module {
-let next_towards_current = current_module
+let next_towards_current = ctx
+.module
.path_to_root(ctx.db)
.into_iter()
.take_while(|it| it != module)

@@ -35,7 +35,6 @@ pub(crate) fn complete_vis(acc: &mut Completions, ctx: &CompletionContext) {
}
}
-}
}

if *is_super_chain {
acc.add_keyword(ctx, "super::");

@@ -121,9 +121,9 @@ pub(crate) struct CompletionContext<'a> {
/// The token before the cursor, in the macro-expanded file.
pub(super) token: SyntaxToken,
/// The crate of the current file.
-pub(super) krate: Option<hir::Crate>,
+pub(super) krate: hir::Crate,
/// The module of the `scope`.
-pub(super) module: Option<hir::Module>,
+pub(super) module: hir::Module,
pub(super) expected_name: Option<NameOrNameRef>,
pub(super) expected_type: Option<Type>,

@@ -353,11 +353,7 @@ impl<'a> CompletionContext<'a> {
attrs: &hir::Attrs,
defining_crate: hir::Crate,
) -> Visible {
-let module = match self.module {
-Some(it) => it,
-None => return Visible::No,
-};
-if !vis.is_visible_from(self.db, module.into()) {
+if !vis.is_visible_from(self.db, self.module.into()) {
if !self.config.enable_private_editable {
return Visible::No;
}

@@ -376,11 +372,8 @@ impl<'a> CompletionContext<'a> {
}

fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool {
-match self.krate {
// `doc(hidden)` items are only completed within the defining crate.
-Some(krate) => krate != defining_crate && attrs.has_doc_hidden(),
-None => true,
-}
+self.krate != defining_crate && attrs.has_doc_hidden()
}
}

@@ -409,7 +402,7 @@ impl<'a> CompletionContext<'a> {

let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
let token = sema.descend_into_macros_single(original_token.clone());
-let scope = sema.scope_at_offset(&token.parent()?, offset);
+let scope = sema.scope_at_offset(&token.parent()?, offset)?;
let krate = scope.krate();
let module = scope.module();
let mut locals = FxHashMap::default();

@@ -185,7 +185,7 @@ pub fn resolve_completion_edits(
let position_for_import = &position_for_import(&ctx, None)?;
let scope = ImportScope::find_insert_use_container(position_for_import, &ctx.sema)?;

-let current_module = ctx.sema.scope(position_for_import).module()?;
+let current_module = ctx.sema.scope(position_for_import)?.module();
let current_crate = current_module.krate();
let new_ast = scope.clone_for_update();
let mut import_insert = TextEdit::builder();

@@ -72,7 +72,7 @@ pub(crate) fn visible_fields(
fields: &[hir::Field],
item: impl HasAttrs,
) -> Option<(Vec<hir::Field>, bool)> {
-let module = ctx.module?;
+let module = ctx.module;
let n_fields = fields.len();
let fields = fields
.iter()

@@ -185,7 +185,7 @@ fn import_edits(
_ => return None,
};
let path =
-ctx.module?.find_use_path_prefixed(ctx.db, item, ctx.config.insert_use.prefix_kind)?;
+ctx.module.find_use_path_prefixed(ctx.db, item, ctx.config.insert_use.prefix_kind)?;
Some((path.len() > 1).then(|| ImportEdit {
import: LocatedImport::new(path.clone(), item, item, None),
scope: import_scope.clone(),

@@ -18,7 +18,7 @@ use crate::RootDatabase;
/// ```
/// //- minicore: iterator, ord, derive
/// ```
-pub struct FamousDefs<'a, 'b>(pub &'a Semantics<'b, RootDatabase>, pub Option<Crate>);
+pub struct FamousDefs<'a, 'b>(pub &'a Semantics<'b, RootDatabase>, pub Crate);

#[allow(non_snake_case)]
impl FamousDefs<'_, '_> {

@@ -142,7 +142,7 @@ impl FamousDefs<'_, '_> {
}

fn find_lang_crate(&self, origin: LangCrateOrigin) -> Option<Crate> {
-let krate = self.1?;
+let krate = self.1;
let db = self.0.db;
let crate_graph = self.0.db.crate_graph();
let res = krate

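`FamousDefs` now stores a `Crate` instead of `Option<Crate>`, so a caller first obtains a crate from a (now optional) scope. A rough sketch of the call-site shape used elsewhere in this diff, assuming an `Option`-returning function and an `expr` node chosen purely for illustration:

    // The crate comes from the scope; the `?` handles a node outside any scope.
    let famous = FamousDefs(&ctx.sema, ctx.sema.scope(expr.syntax())?.krate());
    let result_enum = famous.core_result_Result()?;
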
@@ -104,7 +104,7 @@ impl ImportAssets {
let candidate_node = method_call.syntax().clone();
Some(Self {
import_candidate: ImportCandidate::for_method_call(sema, method_call)?,
-module_with_candidate: sema.scope(&candidate_node).module()?,
+module_with_candidate: sema.scope(&candidate_node)?.module(),
candidate_node,
})
}

@@ -119,7 +119,7 @@ impl ImportAssets {
}
Some(Self {
import_candidate: ImportCandidate::for_regular_path(sema, fully_qualified_path)?,
-module_with_candidate: sema.scope(&candidate_node).module()?,
+module_with_candidate: sema.scope(&candidate_node)?.module(),
candidate_node,
})
}

@@ -132,7 +132,7 @@ impl ImportAssets {
let candidate_node = pat.syntax().clone();
Some(Self {
import_candidate: ImportCandidate::for_name(sema, &name)?,
-module_with_candidate: sema.scope(&candidate_node).module()?,
+module_with_candidate: sema.scope(&candidate_node)?.module(),
candidate_node,
})
}

@@ -248,7 +248,10 @@ impl ImportAssets {
};

let krate = self.module_with_candidate.krate();
-let scope = sema.scope(&self.candidate_node);
+let scope = match sema.scope(&self.candidate_node) {
+Some(it) => it,
+None => return Vec::new(),
+};

match &self.import_candidate {
ImportCandidate::Path(path_candidate) => {

@@ -271,9 +274,11 @@ impl ImportAssets {
fn scope_definitions(&self, sema: &Semantics<RootDatabase>) -> FxHashSet<ScopeDef> {
let _p = profile::span("import_assets::scope_definitions");
let mut scope_definitions = FxHashSet::default();
-sema.scope(&self.candidate_node).process_all_names(&mut |_, scope_def| {
+if let Some(scope) = sema.scope(&self.candidate_node) {
+scope.process_all_names(&mut |_, scope_def| {
scope_definitions.insert(scope_def);
});
+}
scope_definitions
}
}

@@ -588,7 +593,7 @@ impl ImportCandidate {

fn for_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option<Self> {
if sema
-.scope(name.syntax())
+.scope(name.syntax())?
.speculative_resolve(&ast::make::ext::ident_path(&name.text()))
.is_some()
{

@@ -69,15 +69,13 @@ impl<'a> PathTransform<'a> {
}

pub fn apply(&self, syntax: &SyntaxNode) {
-if let Some(ctx) = self.build_ctx() {
-ctx.apply(syntax)
-}
+self.build_ctx().apply(syntax)
}

-fn build_ctx(&self) -> Option<Ctx<'a>> {
+fn build_ctx(&self) -> Ctx<'a> {
let db = self.source_scope.db;
-let target_module = self.target_scope.module()?;
-let source_module = self.source_scope.module()?;
+let target_module = self.target_scope.module();
+let source_module = self.source_scope.module();
let skip = match self.generic_def {
// this is a trait impl, so we need to skip the first type parameter -- this is a bit hacky
hir::GenericDef::Trait(_) => 1,

@@ -111,8 +109,7 @@ impl<'a> PathTransform<'a> {
},
})
.collect();
-let res = Ctx { substs: substs_by_param, target_module, source_scope: self.source_scope };
-Some(res)
+Ctx { substs: substs_by_param, target_module, source_scope: self.source_scope }
}
}

@@ -64,7 +64,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass

let new_field_list = old_field_list.clone_for_update();
let mut locals = FxHashMap::default();
-ctx.sema.scope(field_list_parent.syntax()).process_all_names(&mut |name, def| {
+ctx.sema.scope(field_list_parent.syntax())?.process_all_names(&mut |name, def| {
if let hir::ScopeDef::Local(local) = def {
locals.insert(name, local);
}

@@ -171,11 +171,7 @@ fn get_default_constructor(
if has_new_func {
Some(make::ext::expr_ty_new(&make_ty(ty, ctx.sema.db, module)))
} else if !ty.is_array()
-&& ty.impls_trait(
-ctx.sema.db,
-FamousDefs(&ctx.sema, Some(krate)).core_default_Default()?,
-&[],
-)
+&& ty.impls_trait(ctx.sema.db, FamousDefs(&ctx.sema, krate).core_default_Default()?, &[])
{
Some(make::ext::expr_ty_default(&make_ty(ty, ctx.sema.db, module)))
} else {

@@ -76,7 +76,7 @@ fn add_missing_ok_or_some(
let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
let expr = d.expr.value.to_node(&root);
let expr_range = expr.syntax().text_range();
-let scope = ctx.sema.scope(expr.syntax());
+let scope = ctx.sema.scope(expr.syntax())?;

let expected_adt = d.expected.as_adt()?;
let expected_enum = expected_adt.as_enum()?;

@@ -25,7 +25,7 @@ pub fn ssr_from_comment(db: &RootDatabase, frange: FileRange) -> Option<(MatchFi

let lookup_context = FilePosition { file_id: frange.file_id, offset: frange.range.start() };

-let mut match_finder = MatchFinder::in_context(db, lookup_context, vec![]);
+let mut match_finder = MatchFinder::in_context(db, lookup_context, vec![]).ok()?;
match_finder.add_rule(ssr_rule).ok()?;

Some((match_finder, comment.syntax().text_range()))

@@ -128,11 +128,12 @@ impl<'db> MatchFinder<'db> {
db: &'db ide_db::RootDatabase,
lookup_context: FilePosition,
mut restrict_ranges: Vec<FileRange>,
-) -> MatchFinder<'db> {
+) -> Result<MatchFinder<'db>, SsrError> {
restrict_ranges.retain(|range| !range.range.is_empty());
let sema = Semantics::new(db);
-let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context);
-MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges }
+let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context)
+.ok_or_else(|| SsrError("no resolution scope for file".into()))?;
+Ok(MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges })
}

/// Constructs an instance using the start of the first file in `db` as the lookup context.

@@ -142,11 +143,11 @@ impl<'db> MatchFinder<'db> {
if let Some(first_file_id) =
db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
{
-Ok(MatchFinder::in_context(
+MatchFinder::in_context(
db,
FilePosition { file_id: first_file_id, offset: 0.into() },
vec![],
-))
+)
} else {
bail!("No files to search");
}

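Because `MatchFinder::in_context` now returns `Result<MatchFinder, SsrError>`, callers must handle the error instead of assuming a resolution scope exists. A hedged sketch of the call styles seen in this diff, with `db`, `position`, and `selections` standing in for whatever the caller already has:

    // Library code propagates the error (or converts it, e.g. with .ok()?):
    let mut match_finder = MatchFinder::in_context(db, position, vec![])?;
    // Test code simply unwraps:
    // let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
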
@@ -646,8 +646,8 @@ impl Match {
) -> Result<(), MatchFailed> {
let module = sema
.scope(&self.matched_node)
-.module()
-.ok_or_else(|| match_error!("Matched node isn't in a module"))?;
+.ok_or_else(|| match_error!("Matched node isn't in a module"))?
+.module();
for (path, resolved_path) in &template.resolved_paths {
if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {

@@ -788,7 +788,7 @@ mod tests {
let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";

let (db, position, selections) = crate::tests::single_file(input);
-let mut match_finder = MatchFinder::in_context(&db, position, selections);
+let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
match_finder.add_rule(rule).unwrap();
let matches = match_finder.matches();
assert_eq!(matches.matches.len(), 1);

@@ -186,7 +186,7 @@ impl<'db> ResolutionScope<'db> {
pub(crate) fn new(
sema: &hir::Semantics<'db, ide_db::RootDatabase>,
resolve_context: FilePosition,
-) -> ResolutionScope<'db> {
+) -> Option<ResolutionScope<'db>> {
use syntax::ast::AstNode;
let file = sema.parse(resolve_context.file_id);
// Find a node at the requested position, falling back to the whole file.

@@ -197,8 +197,8 @@ impl<'db> ResolutionScope<'db> {
.and_then(|token| token.parent())
.unwrap_or_else(|| file.syntax().clone());
let node = pick_node_for_resolution(node);
-let scope = sema.scope(&node);
-ResolutionScope { scope, node }
+let scope = sema.scope(&node)?;
+Some(ResolutionScope { scope, node })
}

/// Returns the function in which SSR was invoked, if any.

@@ -219,7 +219,7 @@ impl<'db> ResolutionScope<'db> {
let resolved_qualifier = self.scope.speculative_resolve(&path.qualifier()?)?;
if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
let name = path.segment()?.name_ref()?;
-let module = self.scope.module()?;
+let module = self.scope.module();
adt.ty(self.scope.db).iterate_path_candidates(
self.scope.db,
&self.scope,

@@ -94,7 +94,7 @@ fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {

fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
let (db, position, selections) = single_file(input);
-let mut match_finder = MatchFinder::in_context(&db, position, selections);
+let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
for rule in rules {
let rule: SsrRule = rule.parse().unwrap();
match_finder.add_rule(rule).unwrap();

@@ -124,7 +124,7 @@ fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet:

fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
let (db, position, selections) = single_file(code);
-let mut match_finder = MatchFinder::in_context(&db, position, selections);
+let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let matched_strings: Vec<String> =
match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();

@@ -136,7 +136,7 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {

fn assert_no_match(pattern: &str, code: &str) {
let (db, position, selections) = single_file(code);
-let mut match_finder = MatchFinder::in_context(&db, position, selections);
+let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let matches = match_finder.matches().flattened().matches;
if !matches.is_empty() {

@@ -147,7 +147,7 @@ fn assert_no_match(pattern: &str, code: &str) {

fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
let (db, position, selections) = single_file(code);
-let mut match_finder = MatchFinder::in_context(&db, position, selections);
+let mut match_finder = MatchFinder::in_context(&db, position, selections).unwrap();
match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
let mut reasons = Vec::new();
for d in match_finder.debug_where_text_equal(position.file_id, snippet) {