Merge branch 'master' into sync-from-rust

This commit is contained in:
Laurențiu Nicola 2023-11-15 09:46:27 +02:00
commit 6b53c09ef5
73 changed files with 1252 additions and 401 deletions

View file

@ -54,7 +54,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false }; let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false };
if let DefWithBodyId::FunctionId(it) = owner { if let DefWithBodyId::FunctionId(it) = owner {
p.buf.push('('); p.buf.push('(');
body.params.iter().zip(&db.function_data(it).params).for_each(|(&param, ty)| { body.params.iter().zip(db.function_data(it).params.iter()).for_each(|(&param, ty)| {
p.print_pat(param); p.print_pat(param);
p.buf.push(':'); p.buf.push(':');
p.print_type_ref(ty); p.print_type_ref(ty);

View file

@ -34,7 +34,7 @@ use crate::{
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct FunctionData { pub struct FunctionData {
pub name: Name, pub name: Name,
pub params: Vec<Interned<TypeRef>>, pub params: Box<[Interned<TypeRef>]>,
pub ret_type: Interned<TypeRef>, pub ret_type: Interned<TypeRef>,
pub attrs: Attrs, pub attrs: Attrs,
pub visibility: RawVisibility, pub visibility: RawVisibility,
@ -177,7 +177,7 @@ pub struct TypeAliasData {
pub rustc_has_incoherent_inherent_impls: bool, pub rustc_has_incoherent_inherent_impls: bool,
pub rustc_allow_incoherent_impl: bool, pub rustc_allow_incoherent_impl: bool,
/// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl). /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl).
pub bounds: Vec<Interned<TypeBound>>, pub bounds: Box<[Interned<TypeBound>]>,
} }
impl TypeAliasData { impl TypeAliasData {
@ -210,7 +210,7 @@ impl TypeAliasData {
is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)), is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
rustc_has_incoherent_inherent_impls, rustc_has_incoherent_inherent_impls,
rustc_allow_incoherent_impl, rustc_allow_incoherent_impl,
bounds: typ.bounds.to_vec(), bounds: typ.bounds.clone(),
}) })
} }
} }
@ -327,6 +327,7 @@ pub struct ImplData {
pub self_ty: Interned<TypeRef>, pub self_ty: Interned<TypeRef>,
pub items: Vec<AssocItemId>, pub items: Vec<AssocItemId>,
pub is_negative: bool, pub is_negative: bool,
pub is_unsafe: bool,
// box it as the vec is usually empty anyways // box it as the vec is usually empty anyways
pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>, pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
} }
@ -348,6 +349,7 @@ impl ImplData {
let target_trait = impl_def.target_trait.clone(); let target_trait = impl_def.target_trait.clone();
let self_ty = impl_def.self_ty.clone(); let self_ty = impl_def.self_ty.clone();
let is_negative = impl_def.is_negative; let is_negative = impl_def.is_negative;
let is_unsafe = impl_def.is_unsafe;
let mut collector = let mut collector =
AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id)); AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id));
@ -357,7 +359,14 @@ impl ImplData {
let items = items.into_iter().map(|(_, item)| item).collect(); let items = items.into_iter().map(|(_, item)| item).collect();
( (
Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls }), Arc::new(ImplData {
target_trait,
self_ty,
items,
is_negative,
is_unsafe,
attribute_calls,
}),
diagnostics.into(), diagnostics.into(),
) )
} }

View file

@ -21,9 +21,10 @@ pub fn find_path(
item: ItemInNs, item: ItemInNs,
from: ModuleId, from: ModuleId,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> Option<ModPath> { ) -> Option<ModPath> {
let _p = profile::span("find_path"); let _p = profile::span("find_path");
find_path_inner(db, item, from, None, prefer_no_std) find_path_inner(db, item, from, None, prefer_no_std, prefer_prelude)
} }
pub fn find_path_prefixed( pub fn find_path_prefixed(
@ -32,9 +33,10 @@ pub fn find_path_prefixed(
from: ModuleId, from: ModuleId,
prefix_kind: PrefixKind, prefix_kind: PrefixKind,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> Option<ModPath> { ) -> Option<ModPath> {
let _p = profile::span("find_path_prefixed"); let _p = profile::span("find_path_prefixed");
find_path_inner(db, item, from, Some(prefix_kind), prefer_no_std) find_path_inner(db, item, from, Some(prefix_kind), prefer_no_std, prefer_prelude)
} }
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug)]
@ -88,6 +90,7 @@ fn find_path_inner(
from: ModuleId, from: ModuleId,
prefixed: Option<PrefixKind>, prefixed: Option<PrefixKind>,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> Option<ModPath> { ) -> Option<ModPath> {
// - if the item is a builtin, it's in scope // - if the item is a builtin, it's in scope
if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item { if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item {
@ -109,6 +112,7 @@ fn find_path_inner(
MAX_PATH_LEN, MAX_PATH_LEN,
prefixed, prefixed,
prefer_no_std || db.crate_supports_no_std(crate_root.krate), prefer_no_std || db.crate_supports_no_std(crate_root.krate),
prefer_prelude,
) )
.map(|(item, _)| item); .map(|(item, _)| item);
} }
@ -134,6 +138,7 @@ fn find_path_inner(
from, from,
prefixed, prefixed,
prefer_no_std, prefer_no_std,
prefer_prelude,
) { ) {
let data = db.enum_data(variant.parent); let data = db.enum_data(variant.parent);
path.push_segment(data.variants[variant.local_id].name.clone()); path.push_segment(data.variants[variant.local_id].name.clone());
@ -156,6 +161,7 @@ fn find_path_inner(
from, from,
prefixed, prefixed,
prefer_no_std || db.crate_supports_no_std(crate_root.krate), prefer_no_std || db.crate_supports_no_std(crate_root.krate),
prefer_prelude,
scope_name, scope_name,
) )
.map(|(item, _)| item) .map(|(item, _)| item)
@ -171,6 +177,7 @@ fn find_path_for_module(
max_len: usize, max_len: usize,
prefixed: Option<PrefixKind>, prefixed: Option<PrefixKind>,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> Option<(ModPath, Stability)> { ) -> Option<(ModPath, Stability)> {
if max_len == 0 { if max_len == 0 {
return None; return None;
@ -236,6 +243,7 @@ fn find_path_for_module(
from, from,
prefixed, prefixed,
prefer_no_std, prefer_no_std,
prefer_prelude,
scope_name, scope_name,
) )
} }
@ -316,6 +324,7 @@ fn calculate_best_path(
from: ModuleId, from: ModuleId,
mut prefixed: Option<PrefixKind>, mut prefixed: Option<PrefixKind>,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
scope_name: Option<Name>, scope_name: Option<Name>,
) -> Option<(ModPath, Stability)> { ) -> Option<(ModPath, Stability)> {
if max_len <= 1 { if max_len <= 1 {
@ -351,11 +360,14 @@ fn calculate_best_path(
best_path_len - 1, best_path_len - 1,
prefixed, prefixed,
prefer_no_std, prefer_no_std,
prefer_prelude,
) { ) {
path.0.push_segment(name); path.0.push_segment(name);
let new_path = match best_path.take() { let new_path = match best_path.take() {
Some(best_path) => select_best_path(best_path, path, prefer_no_std), Some(best_path) => {
select_best_path(best_path, path, prefer_no_std, prefer_prelude)
}
None => path, None => path,
}; };
best_path_len = new_path.0.len(); best_path_len = new_path.0.len();
@ -367,18 +379,18 @@ fn calculate_best_path(
// too (unless we can't name it at all). It could *also* be (re)exported by the same crate // too (unless we can't name it at all). It could *also* be (re)exported by the same crate
// that wants to import it here, but we always prefer to use the external path here. // that wants to import it here, but we always prefer to use the external path here.
let crate_graph = db.crate_graph(); for dep in &db.crate_graph()[from.krate].dependencies {
let extern_paths = crate_graph[from.krate].dependencies.iter().filter_map(|dep| {
let import_map = db.import_map(dep.crate_id); let import_map = db.import_map(dep.crate_id);
import_map.import_info_for(item).and_then(|info| { let Some(import_info_for) = import_map.import_info_for(item) else { continue };
for info in import_info_for {
if info.is_doc_hidden { if info.is_doc_hidden {
// the item or import is `#[doc(hidden)]`, so skip it as it is in an external crate // the item or import is `#[doc(hidden)]`, so skip it as it is in an external crate
return None; continue;
} }
// Determine best path for containing module and append last segment from `info`. // Determine best path for containing module and append last segment from `info`.
// FIXME: we should guide this to look up the path locally, or from the same crate again? // FIXME: we should guide this to look up the path locally, or from the same crate again?
let (mut path, path_stability) = find_path_for_module( let Some((mut path, path_stability)) = find_path_for_module(
db, db,
def_map, def_map,
visited_modules, visited_modules,
@ -388,22 +400,26 @@ fn calculate_best_path(
max_len - 1, max_len - 1,
prefixed, prefixed,
prefer_no_std, prefer_no_std,
)?; prefer_prelude,
) else {
continue;
};
cov_mark::hit!(partially_imported); cov_mark::hit!(partially_imported);
path.push_segment(info.name.clone()); path.push_segment(info.name.clone());
Some((
let path_with_stab = (
path, path,
zip_stability(path_stability, if info.is_unstable { Unstable } else { Stable }), zip_stability(path_stability, if info.is_unstable { Unstable } else { Stable }),
)) );
})
});
for path in extern_paths { let new_path_with_stab = match best_path.take() {
let new_path = match best_path.take() { Some(best_path) => {
Some(best_path) => select_best_path(best_path, path, prefer_no_std), select_best_path(best_path, path_with_stab, prefer_no_std, prefer_prelude)
None => path, }
}; None => path_with_stab,
update_best_path(&mut best_path, new_path); };
update_best_path(&mut best_path, new_path_with_stab);
}
} }
} }
if let Some(module) = item.module(db) { if let Some(module) = item.module(db) {
@ -420,17 +436,39 @@ fn calculate_best_path(
} }
} }
/// Select the best (most relevant) path between two paths.
/// This accounts for stability, path length whether std should be chosen over alloc/core paths as
/// well as ignoring prelude like paths or not.
fn select_best_path( fn select_best_path(
old_path: (ModPath, Stability), old_path @ (_, old_stability): (ModPath, Stability),
new_path: (ModPath, Stability), new_path @ (_, new_stability): (ModPath, Stability),
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> (ModPath, Stability) { ) -> (ModPath, Stability) {
match (old_path.1, new_path.1) { match (old_stability, new_stability) {
(Stable, Unstable) => return old_path, (Stable, Unstable) => return old_path,
(Unstable, Stable) => return new_path, (Unstable, Stable) => return new_path,
_ => {} _ => {}
} }
const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc]; const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc];
let choose = |new_path: (ModPath, _), old_path: (ModPath, _)| {
let new_has_prelude = new_path.0.segments().iter().any(|seg| seg == &known::prelude);
let old_has_prelude = old_path.0.segments().iter().any(|seg| seg == &known::prelude);
match (new_has_prelude, old_has_prelude, prefer_prelude) {
(true, false, true) | (false, true, false) => new_path,
(true, false, false) | (false, true, true) => old_path,
// no prelude difference in the paths, so pick the smaller one
(true, true, _) | (false, false, _) => {
if new_path.0.len() < old_path.0.len() {
new_path
} else {
old_path
}
}
}
};
match (old_path.0.segments().first(), new_path.0.segments().first()) { match (old_path.0.segments().first(), new_path.0.segments().first()) {
(Some(old), Some(new)) if STD_CRATES.contains(old) && STD_CRATES.contains(new) => { (Some(old), Some(new)) if STD_CRATES.contains(old) && STD_CRATES.contains(new) => {
let rank = match prefer_no_std { let rank = match prefer_no_std {
@ -451,23 +489,11 @@ fn select_best_path(
let orank = rank(old); let orank = rank(old);
match nrank.cmp(&orank) { match nrank.cmp(&orank) {
Ordering::Less => old_path, Ordering::Less => old_path,
Ordering::Equal => { Ordering::Equal => choose(new_path, old_path),
if new_path.0.len() < old_path.0.len() {
new_path
} else {
old_path
}
}
Ordering::Greater => new_path, Ordering::Greater => new_path,
} }
} }
_ => { _ => choose(new_path, old_path),
if new_path.0.len() < old_path.0.len() {
new_path
} else {
old_path
}
}
} }
} }
@ -570,7 +596,13 @@ mod tests {
/// `code` needs to contain a cursor marker; checks that `find_path` for the /// `code` needs to contain a cursor marker; checks that `find_path` for the
/// item the `path` refers to returns that same path when called from the /// item the `path` refers to returns that same path when called from the
/// module the cursor is in. /// module the cursor is in.
fn check_found_path_(ra_fixture: &str, path: &str, prefix_kind: Option<PrefixKind>) { #[track_caller]
fn check_found_path_(
ra_fixture: &str,
path: &str,
prefix_kind: Option<PrefixKind>,
prefer_prelude: bool,
) {
let (db, pos) = TestDB::with_position(ra_fixture); let (db, pos) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(pos); let module = db.module_at_position(pos);
let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};")); let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
@ -589,11 +621,17 @@ mod tests {
) )
.0 .0
.take_types() .take_types()
.unwrap(); .expect("path does not resolve to a type");
let found_path = let found_path = find_path_inner(
find_path_inner(&db, ItemInNs::Types(resolved), module, prefix_kind, false); &db,
assert_eq!(found_path, Some(mod_path), "{prefix_kind:?}"); ItemInNs::Types(resolved),
module,
prefix_kind,
false,
prefer_prelude,
);
assert_eq!(found_path, Some(mod_path), "on kind: {prefix_kind:?}");
} }
fn check_found_path( fn check_found_path(
@ -603,10 +641,23 @@ mod tests {
absolute: &str, absolute: &str,
self_prefixed: &str, self_prefixed: &str,
) { ) {
check_found_path_(ra_fixture, unprefixed, None); check_found_path_(ra_fixture, unprefixed, None, false);
check_found_path_(ra_fixture, prefixed, Some(PrefixKind::Plain)); check_found_path_(ra_fixture, prefixed, Some(PrefixKind::Plain), false);
check_found_path_(ra_fixture, absolute, Some(PrefixKind::ByCrate)); check_found_path_(ra_fixture, absolute, Some(PrefixKind::ByCrate), false);
check_found_path_(ra_fixture, self_prefixed, Some(PrefixKind::BySelf)); check_found_path_(ra_fixture, self_prefixed, Some(PrefixKind::BySelf), false);
}
fn check_found_path_prelude(
ra_fixture: &str,
unprefixed: &str,
prefixed: &str,
absolute: &str,
self_prefixed: &str,
) {
check_found_path_(ra_fixture, unprefixed, None, true);
check_found_path_(ra_fixture, prefixed, Some(PrefixKind::Plain), true);
check_found_path_(ra_fixture, absolute, Some(PrefixKind::ByCrate), true);
check_found_path_(ra_fixture, self_prefixed, Some(PrefixKind::BySelf), true);
} }
#[test] #[test]
@ -1421,4 +1472,34 @@ pub mod error {
"std::error::Error", "std::error::Error",
); );
} }
#[test]
fn respects_prelude_setting() {
let ra_fixture = r#"
//- /main.rs crate:main deps:krate
$0
//- /krate.rs crate:krate
pub mod prelude {
pub use crate::foo::*;
}
pub mod foo {
pub struct Foo;
}
"#;
check_found_path(
ra_fixture,
"krate::foo::Foo",
"krate::foo::Foo",
"krate::foo::Foo",
"krate::foo::Foo",
);
check_found_path_prelude(
ra_fixture,
"krate::prelude::Foo",
"krate::prelude::Foo",
"krate::prelude::Foo",
"krate::prelude::Foo",
);
}
} }

View file

@ -227,7 +227,7 @@ impl GenericParams {
let mut expander = Lazy::new(|| { let mut expander = Lazy::new(|| {
(module.def_map(db), Expander::new(db, loc.source(db).file_id, module)) (module.def_map(db), Expander::new(db, loc.source(db).file_id, module))
}); });
for param in &func_data.params { for param in func_data.params.iter() {
generic_params.fill_implicit_impl_trait_args(db, &mut expander, param); generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
} }

View file

@ -1,13 +1,14 @@
//! A map of all publicly exported items in a crate. //! A map of all publicly exported items in a crate.
use std::{collections::hash_map::Entry, fmt, hash::BuildHasherDefault}; use std::{fmt, hash::BuildHasherDefault};
use base_db::CrateId; use base_db::CrateId;
use fst::{self, Streamer}; use fst::{self, raw::IndexedValue, Streamer};
use hir_expand::name::Name; use hir_expand::name::Name;
use indexmap::IndexMap; use indexmap::IndexMap;
use itertools::Itertools; use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; use rustc_hash::{FxHashSet, FxHasher};
use smallvec::SmallVec;
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
@ -20,31 +21,28 @@ use crate::{
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>; type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
// FIXME: Support aliases: an item may be exported under multiple names, so `ImportInfo` should
// have `Vec<(Name, ModuleId)>` instead of `(Name, ModuleId)`.
/// Item import details stored in the `ImportMap`. /// Item import details stored in the `ImportMap`.
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct ImportInfo { pub struct ImportInfo {
/// A name that can be used to import the item, relative to the crate's root. /// A name that can be used to import the item, relative to the crate's root.
pub name: Name, pub name: Name,
/// The module containing this item. /// The module containing this item.
pub container: ModuleId, pub container: ModuleId,
/// Whether the import is a trait associated item or not.
pub is_trait_assoc_item: bool,
/// Whether this item is annotated with `#[doc(hidden)]`. /// Whether this item is annotated with `#[doc(hidden)]`.
pub is_doc_hidden: bool, pub is_doc_hidden: bool,
/// Whether this item is annotated with `#[unstable(..)]`. /// Whether this item is annotated with `#[unstable(..)]`.
pub is_unstable: bool, pub is_unstable: bool,
} }
type ImportMapIndex = FxIndexMap<ItemInNs, (SmallVec<[ImportInfo; 1]>, IsTraitAssocItem)>;
/// A map from publicly exported items to its name. /// A map from publicly exported items to its name.
/// ///
/// Reexports of items are taken into account, ie. if something is exported under multiple /// Reexports of items are taken into account, ie. if something is exported under multiple
/// names, the one with the shortest import path will be used. /// names, the one with the shortest import path will be used.
#[derive(Default)] #[derive(Default)]
pub struct ImportMap { pub struct ImportMap {
map: FxIndexMap<ItemInNs, ImportInfo>, map: ImportMapIndex,
/// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the /// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the
/// values returned by running `fst`. /// values returned by running `fst`.
/// ///
@ -55,6 +53,12 @@ pub struct ImportMap {
fst: fst::Map<Vec<u8>>, fst: fst::Map<Vec<u8>>,
} }
#[derive(Copy, Clone, PartialEq, Eq)]
enum IsTraitAssocItem {
Yes,
No,
}
impl ImportMap { impl ImportMap {
pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> { pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("import_map_query"); let _p = profile::span("import_map_query");
@ -64,13 +68,18 @@ impl ImportMap {
let mut importables: Vec<_> = map let mut importables: Vec<_> = map
.iter() .iter()
// We've only collected items, whose name cannot be tuple field. // We've only collected items, whose name cannot be tuple field.
.map(|(&item, info)| (item, info.name.as_str().unwrap().to_ascii_lowercase())) .flat_map(|(&item, (info, _))| {
info.iter()
.map(move |info| (item, info.name.as_str().unwrap().to_ascii_lowercase()))
})
.collect(); .collect();
importables.sort_by(|(_, lhs_name), (_, rhs_name)| lhs_name.cmp(rhs_name)); importables.sort_by(|(_, lhs_name), (_, rhs_name)| lhs_name.cmp(rhs_name));
importables.dedup();
// Build the FST, taking care not to insert duplicate values. // Build the FST, taking care not to insert duplicate values.
let mut builder = fst::MapBuilder::memory(); let mut builder = fst::MapBuilder::memory();
let iter = importables.iter().enumerate().dedup_by(|lhs, rhs| lhs.1 .1 == rhs.1 .1); let iter =
importables.iter().enumerate().dedup_by(|(_, (_, lhs)), (_, (_, rhs))| lhs == rhs);
for (start_idx, (_, name)) in iter { for (start_idx, (_, name)) in iter {
let _ = builder.insert(name, start_idx as u64); let _ = builder.insert(name, start_idx as u64);
} }
@ -82,12 +91,12 @@ impl ImportMap {
}) })
} }
pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> { pub fn import_info_for(&self, item: ItemInNs) -> Option<&[ImportInfo]> {
self.map.get(&item) self.map.get(&item).map(|(info, _)| &**info)
} }
} }
fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemInNs, ImportInfo> { fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
let _p = profile::span("collect_import_map"); let _p = profile::span("collect_import_map");
let def_map = db.crate_def_map(krate); let def_map = db.crate_def_map(krate);
@ -95,11 +104,13 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
// We look only into modules that are public(ly reexported), starting with the crate root. // We look only into modules that are public(ly reexported), starting with the crate root.
let root = def_map.module_id(DefMap::ROOT); let root = def_map.module_id(DefMap::ROOT);
let mut worklist = vec![(root, 0u32)]; let mut worklist = vec![root];
// Records items' minimum module depth. let mut visited = FxHashSet::default();
let mut depth_map = FxHashMap::default();
while let Some((module, depth)) = worklist.pop() { while let Some(module) = worklist.pop() {
if !visited.insert(module) {
continue;
}
let ext_def_map; let ext_def_map;
let mod_data = if module.krate == krate { let mod_data = if module.krate == krate {
&def_map[module.local_id] &def_map[module.local_id]
@ -131,62 +142,18 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
ItemInNs::Macros(id) => Some(id.into()), ItemInNs::Macros(id) => Some(id.into()),
} }
}; };
let status @ (is_doc_hidden, is_unstable) = let (is_doc_hidden, is_unstable) = attr_id.map_or((false, false), |attr_id| {
attr_id.map_or((false, false), |attr_id| { let attrs = db.attrs(attr_id);
let attrs = db.attrs(attr_id); (attrs.has_doc_hidden(), attrs.is_unstable())
(attrs.has_doc_hidden(), attrs.is_unstable()) });
});
let import_info = ImportInfo { let import_info = ImportInfo {
name: name.clone(), name: name.clone(),
container: module, container: module,
is_trait_assoc_item: false,
is_doc_hidden, is_doc_hidden,
is_unstable, is_unstable,
}; };
match depth_map.entry(item) {
Entry::Vacant(entry) => _ = entry.insert((depth, status)),
Entry::Occupied(mut entry) => {
let &(occ_depth, (occ_is_doc_hidden, occ_is_unstable)) = entry.get();
(depth, occ_depth);
let overwrite = match (
is_doc_hidden,
occ_is_doc_hidden,
is_unstable,
occ_is_unstable,
) {
// no change of hiddeness or unstableness
(true, true, true, true)
| (true, true, false, false)
| (false, false, true, true)
| (false, false, false, false) => depth < occ_depth,
// either less hidden or less unstable, accept
(true, true, false, true)
| (false, true, true, true)
| (false, true, false, true)
| (false, true, false, false)
| (false, false, false, true) => true,
// more hidden or unstable, discard
(true, true, true, false)
| (true, false, true, true)
| (true, false, true, false)
| (true, false, false, false)
| (false, false, true, false) => false,
// exchanges doc(hidden) for unstable (and vice-versa),
(true, false, false, true) | (false, true, true, false) => {
depth < occ_depth
}
};
if !overwrite {
continue;
}
entry.insert((depth, status));
}
}
if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() { if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
collect_trait_assoc_items( collect_trait_assoc_items(
db, db,
@ -197,13 +164,14 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
); );
} }
map.insert(item, import_info); let (infos, _) =
map.entry(item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::No));
infos.reserve_exact(1);
infos.push(import_info);
// If we've just added a module, descend into it. We might traverse modules // If we've just added a module, descend into it.
// multiple times, but only if the module depth is smaller (else we `continue`
// above).
if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() { if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
worklist.push((mod_id, depth + 1)); worklist.push(mod_id);
} }
} }
} }
@ -214,7 +182,7 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
fn collect_trait_assoc_items( fn collect_trait_assoc_items(
db: &dyn DefDatabase, db: &dyn DefDatabase,
map: &mut FxIndexMap<ItemInNs, ImportInfo>, map: &mut ImportMapIndex,
tr: TraitId, tr: TraitId,
is_type_in_ns: bool, is_type_in_ns: bool,
trait_import_info: &ImportInfo, trait_import_info: &ImportInfo,
@ -241,11 +209,14 @@ fn collect_trait_assoc_items(
let assoc_item_info = ImportInfo { let assoc_item_info = ImportInfo {
container: trait_import_info.container, container: trait_import_info.container,
name: assoc_item_name.clone(), name: assoc_item_name.clone(),
is_trait_assoc_item: true,
is_doc_hidden: attrs.has_doc_hidden(), is_doc_hidden: attrs.has_doc_hidden(),
is_unstable: attrs.is_unstable(), is_unstable: attrs.is_unstable(),
}; };
map.insert(assoc_item, assoc_item_info);
let (infos, _) =
map.entry(assoc_item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::Yes));
infos.reserve_exact(1);
infos.push(assoc_item_info);
} }
} }
@ -263,10 +234,13 @@ impl fmt::Debug for ImportMap {
let mut importable_names: Vec<_> = self let mut importable_names: Vec<_> = self
.map .map
.iter() .iter()
.map(|(item, _)| match item { .map(|(item, (infos, _))| {
ItemInNs::Types(it) => format!("- {it:?} (t)",), let l = infos.len();
ItemInNs::Values(it) => format!("- {it:?} (v)",), match item {
ItemInNs::Macros(it) => format!("- {it:?} (m)",), ItemInNs::Types(it) => format!("- {it:?} (t) [{l}]",),
ItemInNs::Values(it) => format!("- {it:?} (v) [{l}]",),
ItemInNs::Macros(it) => format!("- {it:?} (m) [{l}]",),
}
}) })
.collect(); .collect();
@ -276,7 +250,7 @@ impl fmt::Debug for ImportMap {
} }
/// A way to match import map contents against the search query. /// A way to match import map contents against the search query.
#[derive(Debug)] #[derive(Copy, Clone, Debug)]
enum SearchMode { enum SearchMode {
/// Import map entry should strictly match the query string. /// Import map entry should strictly match the query string.
Exact, Exact,
@ -349,6 +323,15 @@ impl Query {
Self { case_sensitive: true, ..self } Self { case_sensitive: true, ..self }
} }
fn matches_assoc_mode(&self, is_trait_assoc_item: IsTraitAssocItem) -> bool {
match (is_trait_assoc_item, self.assoc_mode) {
(IsTraitAssocItem::Yes, AssocSearchMode::Exclude)
| (IsTraitAssocItem::No, AssocSearchMode::AssocItemsOnly) => false,
_ => true,
}
}
/// Checks whether the import map entry matches the query.
fn import_matches( fn import_matches(
&self, &self,
db: &dyn DefDatabase, db: &dyn DefDatabase,
@ -356,12 +339,8 @@ impl Query {
enforce_lowercase: bool, enforce_lowercase: bool,
) -> bool { ) -> bool {
let _p = profile::span("import_map::Query::import_matches"); let _p = profile::span("import_map::Query::import_matches");
match (import.is_trait_assoc_item, self.assoc_mode) {
(true, AssocSearchMode::Exclude) => return false,
(false, AssocSearchMode::AssocItemsOnly) => return false,
_ => {}
}
// FIXME: Can we get rid of the alloc here?
let mut input = import.name.display(db.upcast()).to_string(); let mut input = import.name.display(db.upcast()).to_string();
let case_insensitive = enforce_lowercase || !self.case_sensitive; let case_insensitive = enforce_lowercase || !self.case_sensitive;
if case_insensitive { if case_insensitive {
@ -392,7 +371,7 @@ impl Query {
pub fn search_dependencies( pub fn search_dependencies(
db: &dyn DefDatabase, db: &dyn DefDatabase,
krate: CrateId, krate: CrateId,
query: Query, ref query: Query,
) -> FxHashSet<ItemInNs> { ) -> FxHashSet<ItemInNs> {
let _p = profile::span("search_dependencies").detail(|| format!("{query:?}")); let _p = profile::span("search_dependencies").detail(|| format!("{query:?}"));
@ -410,31 +389,58 @@ pub fn search_dependencies(
let mut stream = op.union(); let mut stream = op.union();
let mut res = FxHashSet::default(); let mut res = FxHashSet::default();
let mut common_importable_data_scratch = vec![];
while let Some((_, indexed_values)) = stream.next() { while let Some((_, indexed_values)) = stream.next() {
for indexed_value in indexed_values { for &IndexedValue { index, value } in indexed_values {
let import_map = &import_maps[indexed_value.index]; let import_map = &import_maps[index];
let importables = &import_map.importables[indexed_value.value as usize..]; let importables @ [importable, ..] = &import_map.importables[value as usize..] else {
continue;
};
let common_importable_data = &import_map.map[&importables[0]]; let &(ref importable_data, is_trait_assoc_item) = &import_map.map[importable];
if !query.import_matches(db, common_importable_data, true) { if !query.matches_assoc_mode(is_trait_assoc_item) {
continue; continue;
} }
// Name shared by the importable items in this group. common_importable_data_scratch.extend(
let common_importable_name = importable_data
common_importable_data.name.to_smol_str().to_ascii_lowercase(); .iter()
// Add the items from this name group. Those are all subsequent items in .filter(|&info| query.import_matches(db, info, true))
// `importables` whose name match `common_importable_name`. // Name shared by the importable items in this group.
let iter = importables .map(|info| info.name.to_smol_str()),
.iter() );
.copied() if common_importable_data_scratch.is_empty() {
.take_while(|item| { continue;
common_importable_name }
== import_map.map[item].name.to_smol_str().to_ascii_lowercase() common_importable_data_scratch.sort();
}) common_importable_data_scratch.dedup();
.filter(|item| {
!query.case_sensitive // we've already checked the common importables name case-insensitively let iter =
|| query.import_matches(db, &import_map.map[item], false) common_importable_data_scratch.drain(..).flat_map(|common_importable_name| {
// Add the items from this name group. Those are all subsequent items in
// `importables` whose name match `common_importable_name`.
importables
.iter()
.copied()
.take_while(move |item| {
let &(ref import_infos, assoc_mode) = &import_map.map[item];
query.matches_assoc_mode(assoc_mode)
&& import_infos.iter().any(|info| {
info.name
.to_smol_str()
.eq_ignore_ascii_case(&common_importable_name)
})
})
.filter(move |item| {
!query.case_sensitive || {
// we've already checked the common importables name case-insensitively
let &(ref import_infos, assoc_mode) = &import_map.map[item];
query.matches_assoc_mode(assoc_mode)
&& import_infos
.iter()
.any(|info| query.import_matches(db, info, false))
}
})
}); });
res.extend(iter); res.extend(iter);
@ -461,6 +467,7 @@ mod tests {
let mut importable_paths: Vec<_> = self let mut importable_paths: Vec<_> = self
.map .map
.iter() .iter()
.flat_map(|(item, (info, _))| info.iter().map(move |info| (item, info)))
.map(|(item, info)| { .map(|(item, info)| {
let path = render_path(db, info); let path = render_path(db, info);
let ns = match item { let ns = match item {
@ -499,7 +506,7 @@ mod tests {
let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) { let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) {
Some(assoc_item_path) => (assoc_item_path, "a"), Some(assoc_item_path) => (assoc_item_path, "a"),
None => ( None => (
render_path(&db, dependency_imports.import_info_for(dependency)?), render_path(&db, &dependency_imports.import_info_for(dependency)?[0]),
match dependency { match dependency {
ItemInNs::Types(ModuleDefId::FunctionId(_)) ItemInNs::Types(ModuleDefId::FunctionId(_))
| ItemInNs::Values(ModuleDefId::FunctionId(_)) => "f", | ItemInNs::Values(ModuleDefId::FunctionId(_)) => "f",
@ -547,7 +554,12 @@ mod tests {
.items .items
.iter() .iter()
.find(|(_, assoc_item_id)| &dependency_assoc_item_id == assoc_item_id)?; .find(|(_, assoc_item_id)| &dependency_assoc_item_id == assoc_item_id)?;
Some(format!("{}::{}", render_path(db, trait_info), assoc_item_name.display(db.upcast()))) // FIXME: This should check all import infos, not just the first
Some(format!(
"{}::{}",
render_path(db, &trait_info[0]),
assoc_item_name.display(db.upcast())
))
} }
fn check(ra_fixture: &str, expect: Expect) { fn check(ra_fixture: &str, expect: Expect) {
@ -623,6 +635,7 @@ mod tests {
main: main:
- publ1 (t) - publ1 (t)
- real_pu2 (t) - real_pu2 (t)
- real_pu2::Pub (t)
- real_pub (t) - real_pub (t)
- real_pub::Pub (t) - real_pub::Pub (t)
"#]], "#]],
@ -648,6 +661,7 @@ mod tests {
- sub (t) - sub (t)
- sub::Def (t) - sub::Def (t)
- sub::subsub (t) - sub::subsub (t)
- sub::subsub::Def (t)
"#]], "#]],
); );
} }
@ -747,7 +761,9 @@ mod tests {
- module (t) - module (t)
- module::S (t) - module::S (t)
- module::S (v) - module::S (v)
- module::module (t)
- sub (t) - sub (t)
- sub::module (t)
"#]], "#]],
); );
} }

View file

@ -709,6 +709,7 @@ pub struct Impl {
pub target_trait: Option<Interned<TraitRef>>, pub target_trait: Option<Interned<TraitRef>>,
pub self_ty: Interned<TypeRef>, pub self_ty: Interned<TypeRef>,
pub is_negative: bool, pub is_negative: bool,
pub is_unsafe: bool,
pub items: Box<[AssocItem]>, pub items: Box<[AssocItem]>,
pub ast_id: FileAstId<ast::Impl>, pub ast_id: FileAstId<ast::Impl>,
} }

View file

@ -396,14 +396,7 @@ impl<'a> Ctx<'a> {
let bounds = self.lower_type_bounds(type_alias); let bounds = self.lower_type_bounds(type_alias);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias); let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let ast_id = self.source_ast_id_map.ast_id(type_alias); let ast_id = self.source_ast_id_map.ast_id(type_alias);
let res = TypeAlias { let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id };
name,
visibility,
bounds: bounds.into_boxed_slice(),
generic_params,
type_ref,
ast_id,
};
Some(id(self.data().type_aliases.alloc(res))) Some(id(self.data().type_aliases.alloc(res)))
} }
@ -499,6 +492,7 @@ impl<'a> Ctx<'a> {
let target_trait = impl_def.trait_().and_then(|tr| self.lower_trait_ref(&tr)); let target_trait = impl_def.trait_().and_then(|tr| self.lower_trait_ref(&tr));
let self_ty = self.lower_type_ref(&impl_def.self_ty()?); let self_ty = self.lower_type_ref(&impl_def.self_ty()?);
let is_negative = impl_def.excl_token().is_some(); let is_negative = impl_def.excl_token().is_some();
let is_unsafe = impl_def.unsafe_token().is_some();
// We cannot use `assoc_items()` here as that does not include macro calls. // We cannot use `assoc_items()` here as that does not include macro calls.
let items = impl_def let items = impl_def
@ -513,7 +507,8 @@ impl<'a> Ctx<'a> {
}) })
.collect(); .collect();
let ast_id = self.source_ast_id_map.ast_id(impl_def); let ast_id = self.source_ast_id_map.ast_id(impl_def);
let res = Impl { generic_params, target_trait, self_ty, is_negative, items, ast_id }; let res =
Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id };
Some(id(self.data().impls.alloc(res))) Some(id(self.data().impls.alloc(res)))
} }
@ -637,13 +632,13 @@ impl<'a> Ctx<'a> {
Interned::new(generics) Interned::new(generics)
} }
fn lower_type_bounds(&mut self, node: &dyn ast::HasTypeBounds) -> Vec<Interned<TypeBound>> { fn lower_type_bounds(&mut self, node: &dyn ast::HasTypeBounds) -> Box<[Interned<TypeBound>]> {
match node.type_bound_list() { match node.type_bound_list() {
Some(bound_list) => bound_list Some(bound_list) => bound_list
.bounds() .bounds()
.map(|it| Interned::new(TypeBound::from_ast(&self.body_ctx, it))) .map(|it| Interned::new(TypeBound::from_ast(&self.body_ctx, it)))
.collect(), .collect(),
None => Vec::new(), None => Box::default(),
} }
} }

View file

@ -388,8 +388,18 @@ impl Printer<'_> {
wln!(self); wln!(self);
} }
ModItem::Impl(it) => { ModItem::Impl(it) => {
let Impl { target_trait, self_ty, is_negative, items, generic_params, ast_id: _ } = let Impl {
&self.tree[it]; target_trait,
self_ty,
is_negative,
is_unsafe,
items,
generic_params,
ast_id: _,
} = &self.tree[it];
if *is_unsafe {
w!(self, "unsafe");
}
w!(self, "impl"); w!(self, "impl");
self.print_generic_params(generic_params); self.print_generic_params(generic_params);
w!(self, " "); w!(self, " ");

View file

@ -152,7 +152,7 @@ impl TryFrom<ModuleId> for CrateRootModuleId {
} }
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ModuleId { pub struct ModuleId {
krate: CrateId, krate: CrateId,
/// If this `ModuleId` was derived from a `DefMap` for a block expression, this stores the /// If this `ModuleId` was derived from a `DefMap` for a block expression, this stores the

View file

@ -12,15 +12,11 @@ use syntax::{
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
ast_id_map::AstIdMap, ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_attr_macro::pseudo_derive_attr_expansion, builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
builtin_fn_macro::EagerExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
fixup, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
hygiene::HygieneFrame, MacroDefKind, MacroFile, ProcMacroExpander,
name::{name, AsName},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
}; };
/// Total limit on the number of tokens produced by any macro invocation. /// Total limit on the number of tokens produced by any macro invocation.
@ -619,20 +615,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
} }
// Skip checking token tree limit for include! macro call // Skip checking token tree limit for include! macro call
let skip_check_tt_count = match loc.kind { if !loc.def.is_include() {
MacroCallKind::FnLike { ast_id, expand_to: _ } => {
if let Some(name_ref) =
ast_id.to_node(db).path().and_then(|p| p.segment()).and_then(|s| s.name_ref())
{
name_ref.as_name() == name!(include)
} else {
false
}
}
_ => false,
};
if !skip_check_tt_count {
// Set a hard limit for the expanded tt // Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) { if let Err(value) = check_tt_count(&tt) {
return value; return value;

View file

@ -20,8 +20,8 @@ use crate::{
method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
mir::{BorrowckResult, MirBody, MirLowerError}, mir::{BorrowckResult, MirBody, MirLowerError},
Binders, CallableDefId, ClosureId, Const, FnDefId, GenericArg, ImplTraitId, InferenceResult, Binders, CallableDefId, ClosureId, Const, FnDefId, GenericArg, ImplTraitId, InferenceResult,
Interner, PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty, Interner, PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution,
TyDefId, ValueTyDefId, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
@ -47,7 +47,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self, &self,
def: DefWithBodyId, def: DefWithBodyId,
subst: Substitution, subst: Substitution,
env: Arc<crate::TraitEnvironment>, env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>; ) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)] #[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
@ -55,7 +55,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self, &self,
def: ClosureId, def: ClosureId,
subst: Substitution, subst: Substitution,
env: Arc<crate::TraitEnvironment>, env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>; ) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::borrowck_query)] #[salsa::invoke(crate::mir::borrowck_query)]
@ -81,7 +81,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self, &self,
def: GeneralConstId, def: GeneralConstId,
subst: Substitution, subst: Substitution,
trait_env: Option<Arc<crate::TraitEnvironment>>, trait_env: Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError>; ) -> Result<Const, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_static_query)] #[salsa::invoke(crate::consteval::const_eval_static_query)]
@ -104,16 +104,12 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self, &self,
def: AdtId, def: AdtId,
subst: Substitution, subst: Substitution,
env: Arc<crate::TraitEnvironment>, env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError>; ) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_query)] #[salsa::invoke(crate::layout::layout_of_ty_query)]
#[salsa::cycle(crate::layout::layout_of_ty_recover)] #[salsa::cycle(crate::layout::layout_of_ty_recover)]
fn layout_of_ty( fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
&self,
ty: Ty,
env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)] #[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>; fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;
@ -121,7 +117,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
fn lookup_impl_method( fn lookup_impl_method(
&self, &self,
env: Arc<crate::TraitEnvironment>, env: Arc<TraitEnvironment>,
func: FunctionId, func: FunctionId,
fn_subst: Substitution, fn_subst: Substitution,
) -> (FunctionId, Substitution); ) -> (FunctionId, Substitution);
@ -149,10 +145,10 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::lower::trait_environment_for_body_query)] #[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent] #[salsa::transparent]
fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<crate::TraitEnvironment>; fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;
#[salsa::invoke(crate::lower::trait_environment_query)] #[salsa::invoke(crate::lower::trait_environment_query)]
fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>; fn trait_environment(&self, def: GenericDefId) -> Arc<TraitEnvironment>;
#[salsa::invoke(crate::lower::generic_defaults_query)] #[salsa::invoke(crate::lower::generic_defaults_query)]
#[salsa::cycle(crate::lower::generic_defaults_recover)] #[salsa::cycle(crate::lower::generic_defaults_recover)]
@ -249,7 +245,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn normalize_projection( fn normalize_projection(
&self, &self,
projection: crate::ProjectionTy, projection: crate::ProjectionTy,
env: Arc<crate::TraitEnvironment>, env: Arc<TraitEnvironment>,
) -> Ty; ) -> Ty;
#[salsa::invoke(trait_solve_wait)] #[salsa::invoke(trait_solve_wait)]

View file

@ -11,9 +11,3 @@ pub use crate::diagnostics::{
}, },
unsafe_check::{missing_unsafe, unsafe_expressions, UnsafeExpr}, unsafe_check::{missing_unsafe, unsafe_expressions, UnsafeExpr},
}; };
#[derive(Debug, PartialEq, Eq)]
pub struct IncoherentImpl {
pub file_id: hir_expand::HirFileId,
pub impl_: syntax::AstPtr<syntax::ast::Impl>,
}

View file

@ -945,6 +945,7 @@ impl HirDisplay for Ty {
ItemInNs::Types((*def_id).into()), ItemInNs::Types((*def_id).into()),
module_id, module_id,
false, false,
true,
) { ) {
write!(f, "{}", path.display(f.db.upcast()))?; write!(f, "{}", path.display(f.db.upcast()))?;
} else { } else {

View file

@ -1,5 +1,7 @@
//! Compute the binary representation of a type //! Compute the binary representation of a type
use std::fmt;
use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy}; use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
use hir_def::{ use hir_def::{
layout::{ layout::{
@ -26,12 +28,6 @@ pub use self::{
target::target_data_layout_query, target::target_data_layout_query,
}; };
macro_rules! user_error {
($it: expr) => {
return Err(LayoutError::UserError(format!($it).into()))
};
}
mod adt; mod adt;
mod target; mod target;
@ -73,13 +69,38 @@ pub type Variants = hir_def::layout::Variants<RustcFieldIdx, RustcEnumVariantIdx
#[derive(Debug, PartialEq, Eq, Clone)] #[derive(Debug, PartialEq, Eq, Clone)]
pub enum LayoutError { pub enum LayoutError {
UserError(Box<str>), HasErrorConst,
HasErrorType,
HasPlaceholder,
InvalidSimdType,
NotImplemented,
RecursiveTypeWithoutIndirection,
SizeOverflow, SizeOverflow,
TargetLayoutNotAvailable, TargetLayoutNotAvailable,
HasPlaceholder,
HasErrorType,
NotImplemented,
Unknown, Unknown,
UserReprTooSmall,
}
impl std::error::Error for LayoutError {}
impl fmt::Display for LayoutError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
LayoutError::HasErrorConst => write!(f, "type contains an unevaluatable const"),
LayoutError::HasErrorType => write!(f, "type contains an error"),
LayoutError::HasPlaceholder => write!(f, "type contains placeholders"),
LayoutError::InvalidSimdType => write!(f, "invalid simd type definition"),
LayoutError::NotImplemented => write!(f, "not implemented"),
LayoutError::RecursiveTypeWithoutIndirection => {
write!(f, "recursive type without indirection")
}
LayoutError::SizeOverflow => write!(f, "size overflow"),
LayoutError::TargetLayoutNotAvailable => write!(f, "target layout not available"),
LayoutError::Unknown => write!(f, "unknown"),
LayoutError::UserReprTooSmall => {
write!(f, "the `#[repr]` hint is too small to hold the discriminants of the enum")
}
}
}
} }
struct LayoutCx<'a> { struct LayoutCx<'a> {
@ -118,9 +139,7 @@ fn layout_of_simd_ty(
let f0_ty = match fields.iter().next() { let f0_ty = match fields.iter().next() {
Some(it) => it.1.clone().substitute(Interner, subst), Some(it) => it.1.clone().substitute(Interner, subst),
None => { None => return Err(LayoutError::InvalidSimdType),
user_error!("simd type with zero fields");
}
}; };
// The element type and number of elements of the SIMD vector // The element type and number of elements of the SIMD vector
@ -134,7 +153,7 @@ fn layout_of_simd_ty(
// Extract the number of elements from the layout of the array field: // Extract the number of elements from the layout of the array field:
let FieldsShape::Array { count, .. } = db.layout_of_ty(f0_ty.clone(), env.clone())?.fields let FieldsShape::Array { count, .. } = db.layout_of_ty(f0_ty.clone(), env.clone())?.fields
else { else {
user_error!("Array with non array layout"); return Err(LayoutError::Unknown);
}; };
(e_ty.clone(), count, true) (e_ty.clone(), count, true)
@ -146,7 +165,7 @@ fn layout_of_simd_ty(
// Compute the ABI of the element type: // Compute the ABI of the element type:
let e_ly = db.layout_of_ty(e_ty, env.clone())?; let e_ly = db.layout_of_ty(e_ty, env.clone())?;
let Abi::Scalar(e_abi) = e_ly.abi else { let Abi::Scalar(e_abi) = e_ly.abi else {
user_error!("simd type with inner non scalar type"); return Err(LayoutError::Unknown);
}; };
// Compute the size and alignment of the vector: // Compute the size and alignment of the vector:
@ -259,9 +278,7 @@ pub fn layout_of_ty_query(
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)? cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
} }
TyKind::Array(element, count) => { TyKind::Array(element, count) => {
let count = try_const_usize(db, &count).ok_or(LayoutError::UserError(Box::from( let count = try_const_usize(db, &count).ok_or(LayoutError::HasErrorConst)? as u64;
"unevaluated or mistyped const generic parameter",
)))? as u64;
let element = db.layout_of_ty(element.clone(), trait_env.clone())?; let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?; let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
@ -352,7 +369,7 @@ pub fn layout_of_ty_query(
let mut unit = layout_of_unit(&cx, dl)?; let mut unit = layout_of_unit(&cx, dl)?;
match unit.abi { match unit.abi {
Abi::Aggregate { ref mut sized } => *sized = false, Abi::Aggregate { ref mut sized } => *sized = false,
_ => user_error!("bug"), _ => return Err(LayoutError::Unknown),
} }
unit unit
} }
@ -418,7 +435,7 @@ pub fn layout_of_ty_recover(
_: &Ty, _: &Ty,
_: &Arc<TraitEnvironment>, _: &Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> { ) -> Result<Arc<Layout>, LayoutError> {
user_error!("infinite sized recursive type"); Err(LayoutError::RecursiveTypeWithoutIndirection)
} }
fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result<Layout, LayoutError> { fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result<Layout, LayoutError> {

View file

@ -145,7 +145,7 @@ pub fn layout_of_adt_recover(
_: &Substitution, _: &Substitution,
_: &Arc<TraitEnvironment>, _: &Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> { ) -> Result<Arc<Layout>, LayoutError> {
user_error!("infinite sized recursive type"); Err(LayoutError::RecursiveTypeWithoutIndirection)
} }
/// Finds the appropriate Integer type and signedness for the given /// Finds the appropriate Integer type and signedness for the given
@ -169,11 +169,7 @@ fn repr_discr(
let discr = Integer::from_attr(dl, ity); let discr = Integer::from_attr(dl, ity);
let fit = if ity.is_signed() { signed_fit } else { unsigned_fit }; let fit = if ity.is_signed() { signed_fit } else { unsigned_fit };
if discr < fit { if discr < fit {
return Err(LayoutError::UserError( return Err(LayoutError::UserReprTooSmall);
"Integer::repr_discr: `#[repr]` hint too small for \
discriminant range of enum "
.into(),
));
} }
return Ok((discr, ity.is_signed())); return Ok((discr, ity.is_signed()));
} }

View file

@ -210,16 +210,13 @@ fn recursive() {
struct BoxLike<T: ?Sized>(*mut T); struct BoxLike<T: ?Sized>(*mut T);
struct Goal(BoxLike<Goal>); struct Goal(BoxLike<Goal>);
} }
check_fail( check_fail(r#"struct Goal(Goal);"#, LayoutError::RecursiveTypeWithoutIndirection);
r#"struct Goal(Goal);"#,
LayoutError::UserError("infinite sized recursive type".into()),
);
check_fail( check_fail(
r#" r#"
struct Foo<T>(Foo<T>); struct Foo<T>(Foo<T>);
struct Goal(Foo<i32>); struct Goal(Foo<i32>);
"#, "#,
LayoutError::UserError("infinite sized recursive type".into()), LayoutError::RecursiveTypeWithoutIndirection,
); );
} }

View file

@ -81,6 +81,7 @@ pub use mapping::{
lt_from_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, to_foreign_def_id, lt_from_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, to_foreign_def_id,
to_placeholder_idx, to_placeholder_idx,
}; };
pub use method_resolution::check_orphan_rules;
pub use traits::TraitEnvironment; pub use traits::TraitEnvironment;
pub use utils::{all_super_traits, is_fn_unsafe_to_call}; pub use utils::{all_super_traits, is_fn_unsafe_to_call};

View file

@ -1383,51 +1383,50 @@ pub(crate) fn generic_predicates_for_param_query(
let ctx = TyLoweringContext::new(db, &resolver, def.into()) let ctx = TyLoweringContext::new(db, &resolver, def.into())
.with_type_param_mode(ParamLoweringMode::Variable); .with_type_param_mode(ParamLoweringMode::Variable);
let generics = generics(db.upcast(), def); let generics = generics(db.upcast(), def);
// we have to filter out all other predicates *first*, before attempting to lower them
let predicate = |pred: &&_| match pred {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound, .. } => {
let invalid_target = match target {
WherePredicateTypeTarget::TypeRef(type_ref) => {
ctx.lower_ty_only_param(type_ref) != Some(param_id)
}
&WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
let target_id = TypeOrConstParamId { parent: def, local_id };
target_id != param_id
}
};
if invalid_target {
return false;
}
match &**bound {
TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => {
// Only lower the bound if the trait could possibly define the associated
// type we're looking for.
let Some(assoc_name) = &assoc_name else { return true };
let Some(TypeNs::TraitId(tr)) =
resolver.resolve_path_in_type_ns_fully(db.upcast(), path)
else {
return false;
};
all_super_traits(db.upcast(), tr).iter().any(|tr| {
db.trait_data(*tr).items.iter().any(|(name, item)| {
matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
})
})
}
TypeBound::Lifetime(_) | TypeBound::Error => false,
}
}
WherePredicate::Lifetime { .. } => false,
};
let mut predicates: Vec<_> = resolver let mut predicates: Vec<_> = resolver
.where_predicates_in_scope() .where_predicates_in_scope()
// we have to filter out all other predicates *first*, before attempting to lower them .filter(predicate)
.filter(|pred| match pred {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound, .. } => {
match target {
WherePredicateTypeTarget::TypeRef(type_ref) => {
if ctx.lower_ty_only_param(type_ref) != Some(param_id) {
return false;
}
}
&WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
let target_id = TypeOrConstParamId { parent: def, local_id };
if target_id != param_id {
return false;
}
}
};
match &**bound {
TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => {
// Only lower the bound if the trait could possibly define the associated
// type we're looking for.
let assoc_name = match &assoc_name {
Some(it) => it,
None => return true,
};
let tr = match resolver.resolve_path_in_type_ns_fully(db.upcast(), path) {
Some(TypeNs::TraitId(tr)) => tr,
_ => return false,
};
all_super_traits(db.upcast(), tr).iter().any(|tr| {
db.trait_data(*tr).items.iter().any(|(name, item)| {
matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
})
})
}
TypeBound::Lifetime(_) | TypeBound::Error => false,
}
}
WherePredicate::Lifetime { .. } => false,
})
.flat_map(|pred| { .flat_map(|pred| {
ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p)) ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p))
}) })
@ -1519,7 +1518,12 @@ pub(crate) fn trait_environment_query(
let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses); let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
Arc::new(TraitEnvironment { krate, block: None, traits_from_clauses: traits_in_scope, env }) Arc::new(TraitEnvironment {
krate,
block: None,
traits_from_clauses: traits_in_scope.into_boxed_slice(),
env,
})
} }
/// Resolve the where clause(s) of an item with generics. /// Resolve the where clause(s) of an item with generics.

View file

@ -862,6 +862,62 @@ fn is_inherent_impl_coherent(
} }
} }
/// Checks whether the impl satisfies the orphan rules.
///
/// Given `impl<P1..=Pn> Trait<T1..=Tn> for T0`, an `impl`` is valid only if at least one of the following is true:
/// - Trait is a local trait
/// - All of
/// - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type.
/// - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool {
let substs = TyBuilder::placeholder_subst(db, impl_);
let Some(impl_trait) = db.impl_trait(impl_) else {
// not a trait impl
return true;
};
let local_crate = impl_.lookup(db.upcast()).container.krate();
let is_local = |tgt_crate| tgt_crate == local_crate;
let trait_ref = impl_trait.substitute(Interner, &substs);
let trait_id = from_chalk_trait_id(trait_ref.trait_id);
if is_local(trait_id.module(db.upcast()).krate()) {
// trait to be implemented is local
return true;
}
let unwrap_fundamental = |ty: Ty| match ty.kind(Interner) {
TyKind::Ref(_, _, referenced) => referenced.clone(),
&TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref subs) => {
let struct_data = db.struct_data(s);
if struct_data.flags.contains(StructFlags::IS_FUNDAMENTAL) {
let next = subs.type_parameters(Interner).next();
match next {
Some(ty) => ty,
None => ty,
}
} else {
ty
}
}
_ => ty,
};
// - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type.
let is_not_orphan = trait_ref.substitution.type_parameters(Interner).any(|ty| {
match unwrap_fundamental(ty).kind(Interner) {
&TyKind::Adt(AdtId(id), _) => is_local(id.module(db.upcast()).krate()),
TyKind::Error => true,
TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
is_local(from_chalk_trait_id(trait_ref.trait_id).module(db.upcast()).krate())
}),
_ => false,
}
});
// FIXME: param coverage
// - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
is_not_orphan
}
pub fn iterate_path_candidates( pub fn iterate_path_candidates(
ty: &Canonical<Ty>, ty: &Canonical<Ty>,
db: &dyn HirDatabase, db: &dyn HirDatabase,

View file

@ -4439,42 +4439,42 @@ fn test(v: S<i32>) {
fn associated_type_in_argument() { fn associated_type_in_argument() {
check( check(
r#" r#"
trait A { trait A {
fn m(&self) -> i32; fn m(&self) -> i32;
} }
fn x<T: B>(k: &<T as B>::Ty) { fn x<T: B>(k: &<T as B>::Ty) {
k.m(); k.m();
} }
struct X; struct X;
struct Y; struct Y;
impl A for X { impl A for X {
fn m(&self) -> i32 { fn m(&self) -> i32 {
8 8
}
} }
}
impl A for Y { impl A for Y {
fn m(&self) -> i32 { fn m(&self) -> i32 {
32 32
}
} }
}
trait B { trait B {
type Ty: A; type Ty: A;
} }
impl B for u16 { impl B for u16 {
type Ty = X; type Ty = X;
} }
fn ttt() { fn ttt() {
let inp = Y; let inp = Y;
x::<u16>(&inp); x::<u16>(&inp);
//^^^^ expected &X, got &Y //^^^^ expected &X, got &Y
} }
"#, "#,
); );
} }

View file

@ -48,7 +48,7 @@ pub struct TraitEnvironment {
pub krate: CrateId, pub krate: CrateId,
pub block: Option<BlockId>, pub block: Option<BlockId>,
// FIXME make this a BTreeMap // FIXME make this a BTreeMap
pub(crate) traits_from_clauses: Vec<(Ty, TraitId)>, pub(crate) traits_from_clauses: Box<[(Ty, TraitId)]>,
pub env: chalk_ir::Environment<Interner>, pub env: chalk_ir::Environment<Interner>,
} }
@ -57,7 +57,7 @@ impl TraitEnvironment {
TraitEnvironment { TraitEnvironment {
krate, krate,
block: None, block: None,
traits_from_clauses: Vec::new(), traits_from_clauses: Box::default(),
env: chalk_ir::Environment::new(Interner), env: chalk_ir::Environment::new(Interner),
} }
} }

View file

@ -3,7 +3,7 @@
//! //!
//! This probably isn't the best way to do this -- ideally, diagnostics should //! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves. //! be expressed in terms of hir types themselves.
pub use hir_ty::diagnostics::{CaseType, IncoherentImpl, IncorrectCase}; pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
use base_db::CrateId; use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions}; use cfg::{CfgExpr, CfgOptions};
@ -53,6 +53,9 @@ diagnostics![
PrivateAssocItem, PrivateAssocItem,
PrivateField, PrivateField,
ReplaceFilterMapNextWithFindMap, ReplaceFilterMapNextWithFindMap,
TraitImplIncorrectSafety,
TraitImplMissingAssocItems,
TraitImplOrphan,
TypedHole, TypedHole,
TypeMismatch, TypeMismatch,
UndeclaredLabel, UndeclaredLabel,
@ -280,3 +283,30 @@ pub struct MovedOutOfRef {
pub ty: Type, pub ty: Type,
pub span: InFile<SyntaxNodePtr>, pub span: InFile<SyntaxNodePtr>,
} }
#[derive(Debug, PartialEq, Eq)]
pub struct IncoherentImpl {
pub file_id: HirFileId,
pub impl_: AstPtr<ast::Impl>,
}
#[derive(Debug, PartialEq, Eq)]
pub struct TraitImplOrphan {
pub file_id: HirFileId,
pub impl_: AstPtr<ast::Impl>,
}
// FIXME: Split this off into the corresponding 4 rustc errors
#[derive(Debug, PartialEq, Eq)]
pub struct TraitImplIncorrectSafety {
pub file_id: HirFileId,
pub impl_: AstPtr<ast::Impl>,
pub should_be_safe: bool,
}
#[derive(Debug, PartialEq, Eq)]
pub struct TraitImplMissingAssocItems {
pub file_id: HirFileId,
pub impl_: AstPtr<ast::Impl>,
pub missing: Vec<(Name, AssocItem)>,
}

View file

@ -616,7 +616,7 @@ impl HirDisplay for TypeAlias {
write_where_clause(def_id, f)?; write_where_clause(def_id, f)?;
if !data.bounds.is_empty() { if !data.bounds.is_empty() {
f.write_str(": ")?; f.write_str(": ")?;
f.write_joined(&data.bounds, " + ")?; f.write_joined(data.bounds.iter(), " + ")?;
} }
if let Some(ty) = &data.type_ref { if let Some(ty) = &data.type_ref {
f.write_str(" = ")?; f.write_str(" = ")?;

View file

@ -34,7 +34,7 @@ pub mod symbols;
mod display; mod display;
use std::{iter, ops::ControlFlow}; use std::{iter, mem::discriminant, ops::ControlFlow};
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind}; use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind};
@ -54,14 +54,14 @@ use hir_def::{
resolver::{HasResolver, Resolver}, resolver::{HasResolver, Resolver},
src::HasSource as _, src::HasSource as _,
AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, HasModule, ImplId, EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule,
InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId,
MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId,
TypeOrConstParamId, TypeParamId, UnionId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
}; };
use hir_expand::{name::name, MacroCallKind}; use hir_expand::{name::name, MacroCallKind};
use hir_ty::{ use hir_ty::{
all_super_traits, autoderef, all_super_traits, autoderef, check_orphan_rules,
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt}, consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
diagnostics::BodyValidationDiagnostic, diagnostics::BodyValidationDiagnostic,
known_const_to_ast, known_const_to_ast,
@ -90,17 +90,7 @@ use crate::db::{DefDatabase, HirDatabase};
pub use crate::{ pub use crate::{
attrs::{resolve_doc_path_on, HasAttrs}, attrs::{resolve_doc_path_on, HasAttrs},
diagnostics::{ diagnostics::*,
AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode,
IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError,
MacroExpansionParseError, MalformedDerive, MismatchedArgCount,
MismatchedTupleStructPatArgCount, MissingFields, MissingMatchArms, MissingUnsafe,
MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem, PrivateField,
ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
UnimplementedBuiltinMacro, UnreachableLabel, UnresolvedExternCrate, UnresolvedField,
UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule,
UnresolvedProcMacro, UnusedMut, UnusedVariable,
},
has_source::HasSource, has_source::HasSource,
semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits}, semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
}; };
@ -604,6 +594,7 @@ impl Module {
let inherent_impls = db.inherent_impls_in_crate(self.id.krate()); let inherent_impls = db.inherent_impls_in_crate(self.id.krate());
let mut impl_assoc_items_scratch = vec![];
for impl_def in self.impl_defs(db) { for impl_def in self.impl_defs(db) {
let loc = impl_def.id.lookup(db.upcast()); let loc = impl_def.id.lookup(db.upcast());
let tree = loc.id.item_tree(db.upcast()); let tree = loc.id.item_tree(db.upcast());
@ -614,19 +605,109 @@ impl Module {
// FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow // FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
continue; continue;
} }
let ast_id_map = db.ast_id_map(file_id);
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() { for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
emit_def_diagnostic(db, acc, diag); emit_def_diagnostic(db, acc, diag);
} }
if inherent_impls.invalid_impls().contains(&impl_def.id) { if inherent_impls.invalid_impls().contains(&impl_def.id) {
let ast_id_map = db.ast_id_map(file_id);
acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into()) acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
} }
for item in impl_def.items(db) { if !impl_def.check_orphan_rules(db) {
let def: DefWithBody = match item { acc.push(TraitImplOrphan { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
}
let trait_ = impl_def.trait_(db);
let trait_is_unsafe = trait_.map_or(false, |t| t.is_unsafe(db));
let impl_is_negative = impl_def.is_negative(db);
let impl_is_unsafe = impl_def.is_unsafe(db);
let drop_maybe_dangle = (|| {
// FIXME: This can be simplified a lot by exposing hir-ty's utils.rs::Generics helper
let trait_ = trait_?;
let drop_trait = db.lang_item(self.krate().into(), LangItem::Drop)?.as_trait()?;
if drop_trait != trait_.into() {
return None;
}
let parent = impl_def.id.into();
let generic_params = db.generic_params(parent);
let lifetime_params = generic_params.lifetimes.iter().map(|(local_id, _)| {
GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id })
});
let type_params = generic_params
.iter()
.filter(|(_, it)| it.type_param().is_some())
.map(|(local_id, _)| {
GenericParamId::TypeParamId(TypeParamId::from_unchecked(
TypeOrConstParamId { parent, local_id },
))
});
let res = type_params
.chain(lifetime_params)
.any(|p| db.attrs(AttrDefId::GenericParamId(p)).by_key("may_dangle").exists());
Some(res)
})()
.unwrap_or(false);
match (impl_is_unsafe, trait_is_unsafe, impl_is_negative, drop_maybe_dangle) {
// unsafe negative impl
(true, _, true, _) |
// unsafe impl for safe trait
(true, false, _, false) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(node.ast_id()), file_id, should_be_safe: true }.into()),
// safe impl for unsafe trait
(false, true, false, _) |
// safe impl of dangling drop
(false, false, _, true) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(node.ast_id()), file_id, should_be_safe: false }.into()),
_ => (),
};
if let Some(trait_) = trait_ {
let items = &db.trait_data(trait_.into()).items;
let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
AssocItemId::FunctionId(it) => !db.function_data(it).has_body(),
AssocItemId::ConstId(_) => true,
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(),
});
impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().map(
|&item| {
(
item,
match item {
AssocItemId::FunctionId(it) => db.function_data(it).name.clone(),
AssocItemId::ConstId(it) => {
db.const_data(it).name.as_ref().unwrap().clone()
}
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).name.clone(),
},
)
},
));
let missing: Vec<_> = required_items
.filter(|(name, id)| {
!impl_assoc_items_scratch.iter().any(|(impl_item, impl_name)| {
discriminant(impl_item) == discriminant(id) && impl_name == name
})
})
.map(|(name, item)| (name.clone(), AssocItem::from(*item)))
.collect();
if !missing.is_empty() {
acc.push(
TraitImplMissingAssocItems {
impl_: ast_id_map.get(node.ast_id()),
file_id,
missing,
}
.into(),
)
}
impl_assoc_items_scratch.clear();
}
for &item in &db.impl_data(impl_def.id).items {
let def: DefWithBody = match AssocItem::from(item) {
AssocItem::Function(it) => it.into(), AssocItem::Function(it) => it.into(),
AssocItem::Const(it) => it.into(), AssocItem::Const(it) => it.into(),
AssocItem::TypeAlias(_) => continue, AssocItem::TypeAlias(_) => continue,
@ -665,8 +746,15 @@ impl Module {
db: &dyn DefDatabase, db: &dyn DefDatabase,
item: impl Into<ItemInNs>, item: impl Into<ItemInNs>,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> Option<ModPath> { ) -> Option<ModPath> {
hir_def::find_path::find_path(db, item.into().into(), self.into(), prefer_no_std) hir_def::find_path::find_path(
db,
item.into().into(),
self.into(),
prefer_no_std,
prefer_prelude,
)
} }
/// Finds a path that can be used to refer to the given item from within /// Finds a path that can be used to refer to the given item from within
@ -677,6 +765,7 @@ impl Module {
item: impl Into<ItemInNs>, item: impl Into<ItemInNs>,
prefix_kind: PrefixKind, prefix_kind: PrefixKind,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> Option<ModPath> { ) -> Option<ModPath> {
hir_def::find_path::find_path_prefixed( hir_def::find_path::find_path_prefixed(
db, db,
@ -684,6 +773,7 @@ impl Module {
self.into(), self.into(),
prefix_kind, prefix_kind,
prefer_no_std, prefer_no_std,
prefer_prelude,
) )
} }
} }
@ -3390,6 +3480,10 @@ impl Impl {
db.impl_data(self.id).is_negative db.impl_data(self.id).is_negative
} }
pub fn is_unsafe(self, db: &dyn HirDatabase) -> bool {
db.impl_data(self.id).is_unsafe
}
pub fn module(self, db: &dyn HirDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
self.id.lookup(db.upcast()).container.into() self.id.lookup(db.upcast()).container.into()
} }
@ -3398,6 +3492,10 @@ impl Impl {
let src = self.source(db)?; let src = self.source(db)?;
src.file_id.as_builtin_derive_attr_node(db.upcast()) src.file_id.as_builtin_derive_attr_node(db.upcast())
} }
pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {
check_orphan_rules(db, self.id)
}
} }
#[derive(Clone, PartialEq, Eq, Debug, Hash)] #[derive(Clone, PartialEq, Eq, Debug, Hash)]

View file

@ -14,5 +14,6 @@ pub struct AssistConfig {
pub allowed: Option<Vec<AssistKind>>, pub allowed: Option<Vec<AssistKind>>,
pub insert_use: InsertUseConfig, pub insert_use: InsertUseConfig,
pub prefer_no_std: bool, pub prefer_no_std: bool,
pub prefer_prelude: bool,
pub assist_emit_must_use: bool, pub assist_emit_must_use: bool,
} }

View file

@ -2245,6 +2245,37 @@ impl b::LocalTrait for B {
fn no_skip_default_2() -> Option<()> { fn no_skip_default_2() -> Option<()> {
todo!() todo!()
} }
}
"#,
)
}
#[test]
fn doc_hidden_nondefault_member() {
check_assist(
add_missing_impl_members,
r#"
//- /lib.rs crate:b new_source_root:local
trait LocalTrait {
#[doc(hidden)]
fn no_skip_non_default() -> Option<()>;
#[doc(hidden)]
fn skip_default() -> Option<()> {
todo!()
}
}
//- /main.rs crate:a deps:b
struct B;
impl b::Loc$0alTrait for B {}
"#,
r#"
struct B;
impl b::LocalTrait for B {
fn no_skip_non_default() -> Option<()> {
${0:todo!()}
}
} }
"#, "#,
) )

View file

@ -88,7 +88,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.into_iter() .into_iter()
.filter_map(|variant| { .filter_map(|variant| {
Some(( Some((
build_pat(ctx.db(), module, variant, ctx.config.prefer_no_std)?, build_pat(
ctx.db(),
module,
variant,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?,
variant.should_be_hidden(ctx.db(), module.krate()), variant.should_be_hidden(ctx.db(), module.krate()),
)) ))
}) })
@ -140,7 +146,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.iter() .iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate())); .any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants.into_iter().filter_map(|variant| { let patterns = variants.into_iter().filter_map(|variant| {
build_pat(ctx.db(), module, variant, ctx.config.prefer_no_std) build_pat(
ctx.db(),
module,
variant,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
}); });
(ast::Pat::from(make::tuple_pat(patterns)), is_hidden) (ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
@ -173,7 +185,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.iter() .iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate())); .any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants.into_iter().filter_map(|variant| { let patterns = variants.into_iter().filter_map(|variant| {
build_pat(ctx.db(), module, variant.clone(), ctx.config.prefer_no_std) build_pat(
ctx.db(),
module,
variant.clone(),
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
}); });
(ast::Pat::from(make::slice_pat(patterns)), is_hidden) (ast::Pat::from(make::slice_pat(patterns)), is_hidden)
}) })
@ -440,11 +458,16 @@ fn build_pat(
module: hir::Module, module: hir::Module,
var: ExtendedVariant, var: ExtendedVariant,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> Option<ast::Pat> { ) -> Option<ast::Pat> {
match var { match var {
ExtendedVariant::Variant(var) => { ExtendedVariant::Variant(var) => {
let path = let path = mod_path_to_ast(&module.find_use_path(
mod_path_to_ast(&module.find_use_path(db, ModuleDef::from(var), prefer_no_std)?); db,
ModuleDef::from(var),
prefer_no_std,
prefer_prelude,
)?);
// FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though // FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
Some(match var.source(db)?.value.kind() { Some(match var.source(db)?.value.kind() {

View file

@ -93,6 +93,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
&ctx.sema, &ctx.sema,
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_no_std,
); );
if proposed_imports.is_empty() { if proposed_imports.is_empty() {
return None; return None;

View file

@ -348,6 +348,7 @@ fn augment_references_with_imports(
ModuleDef::Module(*target_module), ModuleDef::Module(*target_module),
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
) )
.map(|mod_path| { .map(|mod_path| {
make::path_concat(mod_path_to_ast(&mod_path), make::path_from_text("Bool")) make::path_concat(mod_path_to_ast(&mod_path), make::path_from_text("Bool"))

View file

@ -50,7 +50,12 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
_ => return None, _ => return None,
}; };
mod_path_to_ast(&module.find_use_path(ctx.db(), src_type_def, ctx.config.prefer_no_std)?) mod_path_to_ast(&module.find_use_path(
ctx.db(),
src_type_def,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?)
}; };
let dest_type = match &ast_trait { let dest_type = match &ast_trait {

View file

@ -205,6 +205,7 @@ fn augment_references_with_imports(
ModuleDef::Module(*target_module), ModuleDef::Module(*target_module),
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
) )
.map(|mod_path| { .map(|mod_path| {
make::path_concat( make::path_concat(

View file

@ -163,6 +163,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
ModuleDef::from(control_flow_enum), ModuleDef::from(control_flow_enum),
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
); );
if let Some(mod_path) = mod_path { if let Some(mod_path) = mod_path {

View file

@ -384,6 +384,7 @@ fn process_references(
*enum_module_def, *enum_module_def,
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
); );
if let Some(mut mod_path) = mod_path { if let Some(mut mod_path) = mod_path {
mod_path.pop_segment(); mod_path.pop_segment();

View file

@ -58,8 +58,12 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let module = ctx.sema.to_def(&strukt)?.module(ctx.db()); let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?; let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
let trait_path = let trait_path = module.find_use_path(
module.find_use_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.prefer_no_std)?; ctx.db(),
ModuleDef::Trait(trait_),
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?;
let field_type = field.ty()?; let field_type = field.ty()?;
let field_name = field.name()?; let field_name = field.name()?;
@ -99,8 +103,12 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
let module = ctx.sema.to_def(&strukt)?.module(ctx.db()); let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?; let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
let trait_path = let trait_path = module.find_use_path(
module.find_use_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.prefer_no_std)?; ctx.db(),
ModuleDef::Trait(trait_),
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?;
let field_type = field.ty()?; let field_type = field.ty()?;
let target = field.syntax().text_range(); let target = field.syntax().text_range();

View file

@ -67,6 +67,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
ctx.sema.db, ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?, item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?; )?;
let expr = use_trivial_constructor( let expr = use_trivial_constructor(

View file

@ -48,6 +48,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) ->
ctx.sema.db, ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?, item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?; )?;
let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call); let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call);

View file

@ -37,8 +37,11 @@ use crate::{
// ``` // ```
pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
let mut proposed_imports = let mut proposed_imports = import_assets.search_for_relative_paths(
import_assets.search_for_relative_paths(&ctx.sema, ctx.config.prefer_no_std); &ctx.sema,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
);
if proposed_imports.is_empty() { if proposed_imports.is_empty() {
return None; return None;
} }

View file

@ -82,7 +82,12 @@ pub(crate) fn replace_derive_with_manual_impl(
}) })
.flat_map(|trait_| { .flat_map(|trait_| {
current_module current_module
.find_use_path(ctx.sema.db, hir::ModuleDef::Trait(trait_), ctx.config.prefer_no_std) .find_use_path(
ctx.sema.db,
hir::ModuleDef::Trait(trait_),
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
.as_ref() .as_ref()
.map(mod_path_to_ast) .map(mod_path_to_ast)
.zip(Some(trait_)) .zip(Some(trait_))

View file

@ -68,6 +68,7 @@ pub(crate) fn replace_qualified_name_with_use(
module, module,
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
) )
}) })
.flatten(); .flatten();

View file

@ -30,6 +30,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
skip_glob_imports: true, skip_glob_imports: true,
}, },
prefer_no_std: false, prefer_no_std: false,
prefer_prelude: true,
assist_emit_must_use: false, assist_emit_must_use: false,
}; };
@ -44,6 +45,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
skip_glob_imports: true, skip_glob_imports: true,
}, },
prefer_no_std: false, prefer_no_std: false,
prefer_prelude: true,
assist_emit_must_use: false, assist_emit_must_use: false,
}; };

View file

@ -106,8 +106,18 @@ pub fn filter_assoc_items(
.iter() .iter()
.copied() .copied()
.filter(|assoc_item| { .filter(|assoc_item| {
!(ignore_items == IgnoreAssocItems::DocHiddenAttrPresent if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
&& assoc_item.attrs(sema.db).has_doc_hidden()) && assoc_item.attrs(sema.db).has_doc_hidden()
{
if let hir::AssocItem::Function(f) = assoc_item {
if !f.has_body(sema.db) {
return true;
}
}
return false;
}
return true;
}) })
// Note: This throws away items with no source. // Note: This throws away items with no source.
.filter_map(|assoc_item| { .filter_map(|assoc_item| {

View file

@ -626,6 +626,7 @@ fn enum_variants_with_paths(
ctx.db, ctx.db,
hir::ModuleDef::from(variant), hir::ModuleDef::from(variant),
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
) { ) {
// Variants with trivial paths are already added by the existing completion logic, // Variants with trivial paths are already added by the existing completion logic,
// so we should avoid adding these twice // so we should avoid adding these twice

View file

@ -175,6 +175,7 @@ pub(crate) fn complete_expr_path(
ctx.db, ctx.db,
hir::ModuleDef::from(strukt), hir::ModuleDef::from(strukt),
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
) )
.filter(|it| it.len() > 1); .filter(|it| it.len() > 1);
@ -197,6 +198,7 @@ pub(crate) fn complete_expr_path(
ctx.db, ctx.db,
hir::ModuleDef::from(un), hir::ModuleDef::from(un),
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
) )
.filter(|it| it.len() > 1); .filter(|it| it.len() > 1);

View file

@ -257,7 +257,12 @@ fn import_on_the_fly(
let user_input_lowercased = potential_import_name.to_lowercase(); let user_input_lowercased = potential_import_name.to_lowercase();
import_assets import_assets
.search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind, ctx.config.prefer_no_std) .search_for_imports(
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
.into_iter() .into_iter()
.filter(ns_filter) .filter(ns_filter)
.filter(|import| { .filter(|import| {
@ -299,7 +304,12 @@ fn import_on_the_fly_pat_(
let user_input_lowercased = potential_import_name.to_lowercase(); let user_input_lowercased = potential_import_name.to_lowercase();
import_assets import_assets
.search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind, ctx.config.prefer_no_std) .search_for_imports(
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
.into_iter() .into_iter()
.filter(ns_filter) .filter(ns_filter)
.filter(|import| { .filter(|import| {
@ -336,7 +346,12 @@ fn import_on_the_fly_method(
let user_input_lowercased = potential_import_name.to_lowercase(); let user_input_lowercased = potential_import_name.to_lowercase();
import_assets import_assets
.search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind, ctx.config.prefer_no_std) .search_for_imports(
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
.into_iter() .into_iter()
.filter(|import| { .filter(|import| {
!ctx.is_item_hidden(&import.item_to_import) !ctx.is_item_hidden(&import.item_to_import)

View file

@ -19,6 +19,7 @@ pub struct CompletionConfig {
pub snippet_cap: Option<SnippetCap>, pub snippet_cap: Option<SnippetCap>,
pub insert_use: InsertUseConfig, pub insert_use: InsertUseConfig,
pub prefer_no_std: bool, pub prefer_no_std: bool,
pub prefer_prelude: bool,
pub snippets: Vec<Snippet>, pub snippets: Vec<Snippet>,
pub limit: Option<usize>, pub limit: Option<usize>,
} }

View file

@ -263,6 +263,7 @@ pub fn resolve_completion_edits(
candidate, candidate,
config.insert_use.prefix_kind, config.insert_use.prefix_kind,
config.prefer_no_std, config.prefer_no_std,
config.prefer_prelude,
) )
}) })
.find(|mod_path| mod_path.display(db).to_string() == full_import_path); .find(|mod_path| mod_path.display(db).to_string() == full_import_path);

View file

@ -179,6 +179,7 @@ fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<V
item, item,
ctx.config.insert_use.prefix_kind, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?; )?;
Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item, None))) Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item, None)))
}; };

View file

@ -68,6 +68,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
callable: Some(CallableSnippets::FillArguments), callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true), snippet_cap: SnippetCap::new(true),
prefer_no_std: false, prefer_no_std: false,
prefer_prelude: true,
insert_use: InsertUseConfig { insert_use: InsertUseConfig {
granularity: ImportGranularity::Crate, granularity: ImportGranularity::Crate,
prefix_kind: PrefixKind::Plain, prefix_kind: PrefixKind::Plain,

View file

@ -220,9 +220,10 @@ impl ImportAssets {
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
prefix_kind: PrefixKind, prefix_kind: PrefixKind,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> Vec<LocatedImport> { ) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for_imports"); let _p = profile::span("import_assets::search_for_imports");
self.search_for(sema, Some(prefix_kind), prefer_no_std) self.search_for(sema, Some(prefix_kind), prefer_no_std, prefer_prelude)
} }
/// This may return non-absolute paths if a part of the returned path is already imported into scope. /// This may return non-absolute paths if a part of the returned path is already imported into scope.
@ -230,9 +231,10 @@ impl ImportAssets {
&self, &self,
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> Vec<LocatedImport> { ) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for_relative_paths"); let _p = profile::span("import_assets::search_for_relative_paths");
self.search_for(sema, None, prefer_no_std) self.search_for(sema, None, prefer_no_std, prefer_prelude)
} }
/// Requires imports to by prefix instead of fuzzily. /// Requires imports to by prefix instead of fuzzily.
@ -270,6 +272,7 @@ impl ImportAssets {
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
prefixed: Option<PrefixKind>, prefixed: Option<PrefixKind>,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> Vec<LocatedImport> { ) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for"); let _p = profile::span("import_assets::search_for");
@ -281,6 +284,7 @@ impl ImportAssets {
&self.module_with_candidate, &self.module_with_candidate,
prefixed, prefixed,
prefer_no_std, prefer_no_std,
prefer_prelude,
) )
}; };
@ -594,11 +598,18 @@ fn get_mod_path(
module_with_candidate: &Module, module_with_candidate: &Module,
prefixed: Option<PrefixKind>, prefixed: Option<PrefixKind>,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool,
) -> Option<ModPath> { ) -> Option<ModPath> {
if let Some(prefix_kind) = prefixed { if let Some(prefix_kind) = prefixed {
module_with_candidate.find_use_path_prefixed(db, item_to_search, prefix_kind, prefer_no_std) module_with_candidate.find_use_path_prefixed(
db,
item_to_search,
prefix_kind,
prefer_no_std,
prefer_prelude,
)
} else { } else {
module_with_candidate.find_use_path(db, item_to_search, prefer_no_std) module_with_candidate.find_use_path(db, item_to_search, prefer_no_std, prefer_prelude)
} }
} }

View file

@ -277,6 +277,7 @@ impl Ctx<'_> {
self.source_scope.db.upcast(), self.source_scope.db.upcast(),
hir::ModuleDef::Trait(trait_ref), hir::ModuleDef::Trait(trait_ref),
false, false,
true,
)?; )?;
match make::ty_path(mod_path_to_ast(&found_path)) { match make::ty_path(mod_path_to_ast(&found_path)) {
ast::Type::PathType(path_ty) => Some(path_ty), ast::Type::PathType(path_ty) => Some(path_ty),
@ -311,8 +312,12 @@ impl Ctx<'_> {
} }
} }
let found_path = let found_path = self.target_module.find_use_path(
self.target_module.find_use_path(self.source_scope.db.upcast(), def, false)?; self.source_scope.db.upcast(),
def,
false,
true,
)?;
let res = mod_path_to_ast(&found_path).clone_for_update(); let res = mod_path_to_ast(&found_path).clone_for_update();
if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) { if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
if let Some(segment) = res.segment() { if let Some(segment) = res.segment() {

View file

@ -584,7 +584,7 @@ impl<'a> FindUsages<'a> {
) -> bool { ) -> bool {
match NameRefClass::classify(self.sema, name_ref) { match NameRefClass::classify(self.sema, name_ref) {
Some(NameRefClass::Definition(Definition::SelfType(impl_))) Some(NameRefClass::Definition(Definition::SelfType(impl_)))
if impl_.self_ty(self.sema.db) == *self_ty => if impl_.self_ty(self.sema.db).as_adt() == self_ty.as_adt() =>
{ {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference { let reference = FileReference {

View file

@ -136,6 +136,7 @@ pub(crate) fn json_in_items(
it, it,
config.insert_use.prefix_kind, config.insert_use.prefix_kind,
config.prefer_no_std, config.prefer_no_std,
config.prefer_prelude,
) { ) {
insert_use(&scope, mod_path_to_ast(&it), &config.insert_use); insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
} }
@ -148,6 +149,7 @@ pub(crate) fn json_in_items(
it, it,
config.insert_use.prefix_kind, config.insert_use.prefix_kind,
config.prefer_no_std, config.prefer_no_std,
config.prefer_prelude,
) { ) {
insert_use(&scope, mod_path_to_ast(&it), &config.insert_use); insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
} }

View file

@ -122,6 +122,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
ctx.sema.db, ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?, item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)?; )?;
use_trivial_constructor( use_trivial_constructor(

View file

@ -0,0 +1,129 @@
use hir::InFile;
use syntax::ast;
use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// Diagnostic: trait-impl-incorrect-safety
//
// Diagnoses incorrect safety annotations of trait impls.
pub(crate) fn trait_impl_incorrect_safety(
    ctx: &DiagnosticsContext<'_>,
    d: &hir::TraitImplIncorrectSafety,
) -> Diagnostic {
    // Builds the diagnostic for an impl whose `unsafe` qualifier does not match
    // the trait's requirement. `d.should_be_safe == true` means the impl is
    // marked `unsafe` but must not be; `false` means it is missing `unsafe`.
    Diagnostic::new(
        DiagnosticCode::Ra("trait-impl-incorrect-safety", Severity::Error),
        if d.should_be_safe {
            "unsafe impl for safe trait"
        } else {
            "impl for unsafe trait needs to be unsafe"
        },
        // Narrow the reported range to the offending keyword(s) instead of
        // highlighting the whole impl block.
        adjusted_display_range::<ast::Impl>(
            ctx,
            InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() },
            &|impl_| {
                if d.should_be_safe {
                    // Cover `unsafe` and `impl` together, whichever of the two
                    // tokens is present; bail out if both are missing.
                    Some(match (impl_.unsafe_token(), impl_.impl_token()) {
                        (None, None) => return None,
                        (None, Some(t)) | (Some(t), None) => t.text_range(),
                        (Some(t1), Some(t2)) => t1.text_range().cover(t2.text_range()),
                    })
                } else {
                    // Point at the `impl` keyword where `unsafe` is missing.
                    impl_.impl_token().map(|t| t.text_range())
                }
            },
        ),
    )
}
#[cfg(test)]
mod tests {
    use crate::tests::check_diagnostics;

    // Safe trait + `unsafe impl` and unsafe trait + safe impl are both flagged;
    // the matching combinations are accepted.
    #[test]
    fn simple() {
        check_diagnostics(
            r#"
trait Safe {}
unsafe trait Unsafe {}
impl Safe for () {}
impl Unsafe for () {}
//^^^^ error: impl for unsafe trait needs to be unsafe
unsafe impl Safe for () {}
//^^^^^^^^^^^ error: unsafe impl for safe trait
unsafe impl Unsafe for () {}
"#,
        );
    }

    // A `Drop` impl with a `#[may_dangle]` generic parameter (type or lifetime)
    // requires `unsafe impl`; without `#[may_dangle]` it must stay safe.
    #[test]
    fn drop_may_dangle() {
        check_diagnostics(
            r#"
#[lang = "drop"]
trait Drop {}
struct S<T>;
struct L<'l>;
impl<T> Drop for S<T> {}
impl<#[may_dangle] T> Drop for S<T> {}
//^^^^ error: impl for unsafe trait needs to be unsafe
unsafe impl<T> Drop for S<T> {}
//^^^^^^^^^^^ error: unsafe impl for safe trait
unsafe impl<#[may_dangle] T> Drop for S<T> {}
impl<'l> Drop for L<'l> {}
impl<#[may_dangle] 'l> Drop for L<'l> {}
//^^^^ error: impl for unsafe trait needs to be unsafe
unsafe impl<'l> Drop for L<'l> {}
//^^^^^^^^^^^ error: unsafe impl for safe trait
unsafe impl<#[may_dangle] 'l> Drop for L<'l> {}
"#,
        );
    }

    // Negative impls never need `unsafe`, even for unsafe traits.
    #[test]
    fn negative() {
        check_diagnostics(
            r#"
trait Trait {}
impl !Trait for () {}
unsafe impl !Trait for () {}
//^^^^^^^^^^^ error: unsafe impl for safe trait
unsafe trait UnsafeTrait {}
impl !UnsafeTrait for () {}
unsafe impl !UnsafeTrait for () {}
//^^^^^^^^^^^ error: unsafe impl for safe trait
"#,
        );
    }

    // Inherent (non-trait) impls can never be `unsafe`.
    #[test]
    fn inherent() {
        check_diagnostics(
            r#"
struct S;
impl S {}
unsafe impl S {}
//^^^^^^^^^^^ error: unsafe impl for safe trait
"#,
        );
    }
}

View file

@ -0,0 +1,102 @@
use hir::InFile;
use itertools::Itertools;
use syntax::{ast, AstNode};
use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: trait-impl-missing-assoc_item
//
// Diagnoses missing trait items in a trait impl.
pub(crate) fn trait_impl_missing_assoc_item(
    ctx: &DiagnosticsContext<'_>,
    d: &hir::TraitImplMissingAssocItems,
) -> Diagnostic {
    // Render the missing items as a comma-separated list of backticked
    // snippets, e.g. "`const C`, `type T`, `fn f`". Built lazily with
    // itertools' `format_with` so no intermediate String per item is needed.
    let missing = d.missing.iter().format_with(", ", |(name, item), f| {
        f(&match *item {
            hir::AssocItem::Function(_) => "`fn ",
            hir::AssocItem::Const(_) => "`const ",
            hir::AssocItem::TypeAlias(_) => "`type ",
        })?;
        f(&name.display(ctx.sema.db))?;
        f(&"`")
    });
    Diagnostic::new(
        DiagnosticCode::RustcHardError("E0046"),
        format!("not all trait items implemented, missing: {missing}"),
        // Highlight only the trait path of the impl, not the entire impl block.
        adjusted_display_range::<ast::Impl>(
            ctx,
            InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() },
            &|impl_| impl_.trait_().map(|t| t.syntax().text_range()),
        ),
    )
}
#[cfg(test)]
mod tests {
    use crate::tests::check_diagnostics;

    // Trait with only required items: each omitted item appears in the list,
    // and a complete impl produces no diagnostic.
    #[test]
    fn simple() {
        check_diagnostics(
            r#"
trait Trait {
    const C: ();
    type T;
    fn f();
}
impl Trait for () {
    const C: () = ();
    type T = ();
    fn f() {}
}
impl Trait for () {
   //^^^^^ error: not all trait items implemented, missing: `const C`
    type T = ();
    fn f() {}
}
impl Trait for () {
   //^^^^^ error: not all trait items implemented, missing: `const C`, `type T`, `fn f`
}
"#,
        );
    }

    // Items with defaults in the trait (`type T = ();`, `fn f() {}`) are not
    // required in the impl; only the defaultless `const C` is ever reported.
    #[test]
    fn default() {
        check_diagnostics(
            r#"
trait Trait {
    const C: ();
    type T = ();
    fn f() {}
}
impl Trait for () {
    const C: () = ();
    type T = ();
    fn f() {}
}
impl Trait for () {
   //^^^^^ error: not all trait items implemented, missing: `const C`
    type T = ();
    fn f() {}
}
impl Trait for () {
   //^^^^^ error: not all trait items implemented, missing: `const C`
    type T = ();
}
impl Trait for () {
   //^^^^^ error: not all trait items implemented, missing: `const C`
}
"#,
        );
    }
}

View file

@ -0,0 +1,106 @@
use hir::InFile;
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: trait-impl-orphan
//
// Only traits defined in the current crate can be implemented for arbitrary types
pub(crate) fn trait_impl_orphan(
    ctx: &DiagnosticsContext<'_>,
    d: &hir::TraitImplOrphan,
) -> Diagnostic {
    // Reports E0117 on an impl that violates the orphan rules: neither the
    // trait nor the self type is local to the current crate.
    Diagnostic::new_with_syntax_node_ptr(
        ctx,
        DiagnosticCode::RustcHardError("E0117"),
        // `format!` with no interpolation is just an allocation
        // (clippy::useless_format) — use `.to_string()`; message text unchanged.
        "only traits defined in the current crate can be implemented for arbitrary types"
            .to_string(),
        InFile::new(d.file_id, d.impl_.clone().into()),
    )
    // Not yet checked for false positives
    .experimental()
}
// Unit tests for the E0117 orphan-rule diagnostic above. Fixtures declare
// multiple crates (`//- /foo.rs crate:foo` etc.) so impls can reference
// foreign traits and foreign types; `//^^^` annotations pin expected errors.
#[cfg(test)]
mod tests {
use crate::tests::check_diagnostics;
// Foreign trait for foreign type: error. Either a local type or a local
// trait makes the impl legal.
#[test]
fn simple() {
check_diagnostics(
r#"
//- /foo.rs crate:foo
pub trait Foo {}
//- /bar.rs crate:bar
pub struct Bar;
//- /main.rs crate:main deps:foo,bar
struct LocalType;
trait LocalTrait {}
impl foo::Foo for bar::Bar {}
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
impl foo::Foo for LocalType {}
impl LocalTrait for bar::Bar {}
"#,
);
}
// A local type must appear "uncovered" to satisfy the orphan rule;
// burying it inside foreign generics does not help.
#[test]
fn generics() {
check_diagnostics(
r#"
//- /foo.rs crate:foo
pub trait Foo<T> {}
//- /bar.rs crate:bar
pub struct Bar<T>(T);
//- /main.rs crate:main deps:foo,bar
struct LocalType<T>;
trait LocalTrait<T> {}
impl<T> foo::Foo<T> for bar::Bar<T> {}
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
impl<T> foo::Foo<T> for bar::Bar<LocalType<T>> {}
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
impl<T> foo::Foo<LocalType<T>> for bar::Bar<T> {}
impl<T> foo::Foo<bar::Bar<LocalType<T>>> for bar::Bar<LocalType<T>> {}
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
"#,
);
}
// `#[fundamental]` wrappers (and references) are transparent to the orphan
// rule: a local type inside them still counts as local.
#[test]
fn fundamental() {
check_diagnostics(
r#"
//- /foo.rs crate:foo
pub trait Foo<T> {}
//- /bar.rs crate:bar
pub struct Bar<T>(T);
#[lang = "owned_box"]
#[fundamental]
pub struct Box<T>(T);
//- /main.rs crate:main deps:foo,bar
struct LocalType;
impl<T> foo::Foo<T> for bar::Box<T> {}
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
impl<T> foo::Foo<T> for &LocalType {}
impl<T> foo::Foo<T> for bar::Box<LocalType> {}
"#,
);
}
// A trait object over a local trait is itself local, so no error fires.
#[test]
fn dyn_object() {
check_diagnostics(
r#"
//- /foo.rs crate:foo
pub trait Foo<T> {}
//- /bar.rs crate:bar
pub struct Bar;
//- /main.rs crate:main deps:foo,bar
trait LocalTrait {}
impl<T> foo::Foo<T> for dyn LocalTrait {}
impl<T> foo::Foo<dyn LocalTrait> for Bar {}
"#,
);
}
}

View file

@ -278,6 +278,7 @@ struct Foo;
struct Bar; struct Bar;
impl core::ops::Deref for Foo { impl core::ops::Deref for Foo {
type Target = Bar; type Target = Bar;
fn deref(&self) -> &Self::Target { loop {} }
} }
fn main() { fn main() {
@ -290,6 +291,7 @@ struct Foo;
struct Bar; struct Bar;
impl core::ops::Deref for Foo { impl core::ops::Deref for Foo {
type Target = Bar; type Target = Bar;
fn deref(&self) -> &Self::Target { loop {} }
} }
fn main() { fn main() {

View file

@ -44,6 +44,9 @@ mod handlers {
pub(crate) mod private_assoc_item; pub(crate) mod private_assoc_item;
pub(crate) mod private_field; pub(crate) mod private_field;
pub(crate) mod replace_filter_map_next_with_find_map; pub(crate) mod replace_filter_map_next_with_find_map;
pub(crate) mod trait_impl_orphan;
pub(crate) mod trait_impl_incorrect_safety;
pub(crate) mod trait_impl_missing_assoc_item;
pub(crate) mod typed_hole; pub(crate) mod typed_hole;
pub(crate) mod type_mismatch; pub(crate) mod type_mismatch;
pub(crate) mod unimplemented_builtin_macro; pub(crate) mod unimplemented_builtin_macro;
@ -225,6 +228,7 @@ pub struct DiagnosticsConfig {
// FIXME: We may want to include a whole `AssistConfig` here // FIXME: We may want to include a whole `AssistConfig` here
pub insert_use: InsertUseConfig, pub insert_use: InsertUseConfig,
pub prefer_no_std: bool, pub prefer_no_std: bool,
pub prefer_prelude: bool,
} }
impl DiagnosticsConfig { impl DiagnosticsConfig {
@ -247,6 +251,7 @@ impl DiagnosticsConfig {
skip_glob_imports: false, skip_glob_imports: false,
}, },
prefer_no_std: false, prefer_no_std: false,
prefer_prelude: true,
} }
} }
} }
@ -356,6 +361,9 @@ pub fn diagnostics(
AnyDiagnostic::PrivateAssocItem(d) => handlers::private_assoc_item::private_assoc_item(&ctx, &d), AnyDiagnostic::PrivateAssocItem(d) => handlers::private_assoc_item::private_assoc_item(&ctx, &d),
AnyDiagnostic::PrivateField(d) => handlers::private_field::private_field(&ctx, &d), AnyDiagnostic::PrivateField(d) => handlers::private_field::private_field(&ctx, &d),
AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d), AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d),
AnyDiagnostic::TraitImplIncorrectSafety(d) => handlers::trait_impl_incorrect_safety::trait_impl_incorrect_safety(&ctx, &d),
AnyDiagnostic::TraitImplMissingAssocItems(d) => handlers::trait_impl_missing_assoc_item::trait_impl_missing_assoc_item(&ctx, &d),
AnyDiagnostic::TraitImplOrphan(d) => handlers::trait_impl_orphan::trait_impl_orphan(&ctx, &d),
AnyDiagnostic::TypedHole(d) => handlers::typed_hole::typed_hole(&ctx, &d), AnyDiagnostic::TypedHole(d) => handlers::typed_hole::typed_hole(&ctx, &d),
AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d), AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d),
AnyDiagnostic::UndeclaredLabel(d) => handlers::undeclared_label::undeclared_label(&ctx, &d), AnyDiagnostic::UndeclaredLabel(d) => handlers::undeclared_label::undeclared_label(&ctx, &d),

View file

@ -5,7 +5,7 @@ use expect_test::Expect;
use ide_db::{ use ide_db::{
assists::AssistResolveStrategy, assists::AssistResolveStrategy,
base_db::{fixture::WithFixture, SourceDatabaseExt}, base_db::{fixture::WithFixture, SourceDatabaseExt},
RootDatabase, LineIndexDatabase, RootDatabase,
}; };
use stdx::trim_indent; use stdx::trim_indent;
use test_utils::{assert_eq_text, extract_annotations, MiniCore}; use test_utils::{assert_eq_text, extract_annotations, MiniCore};
@ -43,7 +43,8 @@ fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
.pop() .pop()
.expect("no diagnostics"); .expect("no diagnostics");
let fix = &diagnostic.fixes.expect("diagnostic misses fixes")[nth]; let fix =
&diagnostic.fixes.expect(&format!("{:?} diagnostic misses fixes", diagnostic.code))[nth];
let actual = { let actual = {
let source_change = fix.source_change.as_ref().unwrap(); let source_change = fix.source_change.as_ref().unwrap();
let file_id = *source_change.source_file_edits.keys().next().unwrap(); let file_id = *source_change.source_file_edits.keys().next().unwrap();
@ -103,6 +104,7 @@ pub(crate) fn check_diagnostics(ra_fixture: &str) {
pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) { pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) {
let (db, files) = RootDatabase::with_many_files(ra_fixture); let (db, files) = RootDatabase::with_many_files(ra_fixture);
for file_id in files { for file_id in files {
let line_index = db.line_index(file_id);
let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
let expected = extract_annotations(&db.file_text(file_id)); let expected = extract_annotations(&db.file_text(file_id));
@ -136,8 +138,16 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
} }
} }
if expected != actual { if expected != actual {
let fneg = expected.iter().filter(|x| !actual.contains(x)).collect::<Vec<_>>(); let fneg = expected
let fpos = actual.iter().filter(|x| !expected.contains(x)).collect::<Vec<_>>(); .iter()
.filter(|x| !actual.contains(x))
.map(|(range, s)| (line_index.line_col(range.start()), range, s))
.collect::<Vec<_>>();
let fpos = actual
.iter()
.filter(|x| !expected.contains(x))
.map(|(range, s)| (line_index.line_col(range.start()), range, s))
.collect::<Vec<_>>();
panic!("Diagnostic test failed.\nFalse negatives: {fneg:?}\nFalse positives: {fpos:?}"); panic!("Diagnostic test failed.\nFalse negatives: {fneg:?}\nFalse positives: {fpos:?}");
} }

View file

@ -651,7 +651,7 @@ impl Match {
for (path, resolved_path) in &template.resolved_paths { for (path, resolved_path) in &template.resolved_paths {
if let hir::PathResolution::Def(module_def) = resolved_path.resolution { if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
let mod_path = let mod_path =
module.find_use_path(sema.db, module_def, false).ok_or_else(|| { module.find_use_path(sema.db, module_def, false, true).ok_or_else(|| {
match_error!("Failed to render template path `{}` at match location") match_error!("Failed to render template path `{}` at match location")
})?; })?;
self.rendered_template_paths.insert(path.clone(), mod_path); self.rendered_template_paths.insert(path.clone(), mod_path);

View file

@ -14,12 +14,12 @@ pub struct ExpandedMacro {
// Feature: Expand Macro Recursively // Feature: Expand Macro Recursively
// //
// Shows the full macro expansion of the macro at current cursor. // Shows the full macro expansion of the macro at the current caret position.
// //
// |=== // |===
// | Editor | Action Name // | Editor | Action Name
// //
// | VS Code | **rust-analyzer: Expand macro recursively** // | VS Code | **rust-analyzer: Expand macro recursively at caret**
// |=== // |===
// //
// image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[] // image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[]

View file

@ -563,6 +563,7 @@ mod tests {
use hir::ClosureStyle; use hir::ClosureStyle;
use itertools::Itertools; use itertools::Itertools;
use test_utils::extract_annotations; use test_utils::extract_annotations;
use text_edit::{TextRange, TextSize};
use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode}; use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode};
use crate::DiscriminantHints; use crate::DiscriminantHints;
@ -629,6 +630,22 @@ mod tests {
expect.assert_debug_eq(&inlay_hints) expect.assert_debug_eq(&inlay_hints)
} }
#[track_caller]
pub(super) fn check_expect_clear_loc(
config: InlayHintsConfig,
ra_fixture: &str,
expect: Expect,
) {
let (analysis, file_id) = fixture::file(ra_fixture);
let mut inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
inlay_hints.iter_mut().flat_map(|hint| &mut hint.label.parts).for_each(|hint| {
if let Some(loc) = &mut hint.linked_location {
loc.range = TextRange::empty(TextSize::from(0));
}
});
expect.assert_debug_eq(&inlay_hints)
}
/// Computes inlay hints for the fixture, applies all the provided text edits and then runs /// Computes inlay hints for the fixture, applies all the provided text edits and then runs
/// expect test. /// expect test.
#[track_caller] #[track_caller]

View file

@ -78,7 +78,9 @@ mod tests {
use expect_test::expect; use expect_test::expect;
use crate::{ use crate::{
inlay_hints::tests::{check_expect, check_with_config, DISABLED_CONFIG, TEST_CONFIG}, inlay_hints::tests::{
check_expect, check_expect_clear_loc, check_with_config, DISABLED_CONFIG, TEST_CONFIG,
},
InlayHintsConfig, InlayHintsConfig,
}; };
@ -444,7 +446,7 @@ fn main() {
#[test] #[test]
fn shorten_iterator_chaining_hints() { fn shorten_iterator_chaining_hints() {
check_expect( check_expect_clear_loc(
InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
r#" r#"
//- minicore: iterators //- minicore: iterators
@ -484,7 +486,7 @@ fn main() {
file_id: FileId( file_id: FileId(
1, 1,
), ),
range: 10752..10760, range: 0..0,
}, },
), ),
tooltip: "", tooltip: "",
@ -497,7 +499,7 @@ fn main() {
file_id: FileId( file_id: FileId(
1, 1,
), ),
range: 10784..10788, range: 0..0,
}, },
), ),
tooltip: "", tooltip: "",
@ -522,7 +524,7 @@ fn main() {
file_id: FileId( file_id: FileId(
1, 1,
), ),
range: 10752..10760, range: 0..0,
}, },
), ),
tooltip: "", tooltip: "",
@ -535,7 +537,7 @@ fn main() {
file_id: FileId( file_id: FileId(
1, 1,
), ),
range: 10784..10788, range: 0..0,
}, },
), ),
tooltip: "", tooltip: "",
@ -560,7 +562,7 @@ fn main() {
file_id: FileId( file_id: FileId(
1, 1,
), ),
range: 10752..10760, range: 0..0,
}, },
), ),
tooltip: "", tooltip: "",
@ -573,7 +575,7 @@ fn main() {
file_id: FileId( file_id: FileId(
1, 1,
), ),
range: 10784..10788, range: 0..0,
}, },
), ),
tooltip: "", tooltip: "",
@ -598,7 +600,7 @@ fn main() {
file_id: FileId( file_id: FileId(
0, 0,
), ),
range: 24..30, range: 0..0,
}, },
), ),
tooltip: "", tooltip: "",

View file

@ -683,6 +683,32 @@ enum Foo {
); );
} }
#[test]
fn test_self() {
check(
r#"
struct S$0<T> {
t: PhantomData<T>,
}
impl<T> S<T> {
fn new() -> Self {
Self {
t: Default::default(),
}
}
}
"#,
expect![[r#"
S Struct FileId(0) 0..38 7..8
FileId(0) 48..49
FileId(0) 71..75
FileId(0) 86..90
"#]],
)
}
#[test] #[test]
fn test_find_all_refs_two_modules() { fn test_find_all_refs_two_modules() {
check( check(

View file

@ -208,6 +208,7 @@ impl ProjectFolders {
let entry = { let entry = {
let mut dirs = vfs::loader::Directories::default(); let mut dirs = vfs::loader::Directories::default();
dirs.extensions.push("rs".into()); dirs.extensions.push("rs".into());
dirs.extensions.push("toml".into());
dirs.include.extend(root.include); dirs.include.extend(root.include);
dirs.exclude.extend(root.exclude); dirs.exclude.extend(root.exclude);
for excl in global_excludes { for excl in global_excludes {

View file

@ -762,7 +762,8 @@ impl flags::AnalysisStats {
group: true, group: true,
skip_glob_imports: true, skip_glob_imports: true,
}, },
prefer_no_std: Default::default(), prefer_no_std: false,
prefer_prelude: true,
}, },
ide::AssistResolveStrategy::All, ide::AssistResolveStrategy::All,
file_id, file_id,

View file

@ -352,7 +352,9 @@ config_data! {
/// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`. /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
imports_merge_glob: bool = "true", imports_merge_glob: bool = "true",
/// Prefer to unconditionally use imports of the core and alloc crate, over the std crate. /// Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
imports_prefer_no_std: bool = "false", imports_preferNoStd | imports_prefer_no_std: bool = "false",
/// Whether to prefer import paths containing a `prelude` module.
imports_preferPrelude: bool = "false",
/// The path structure for newly inserted paths to use. /// The path structure for newly inserted paths to use.
imports_prefix: ImportPrefixDef = "\"plain\"", imports_prefix: ImportPrefixDef = "\"plain\"",
@ -1117,7 +1119,8 @@ impl Config {
ExprFillDefaultDef::Default => ExprFillDefaultMode::Default, ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
}, },
insert_use: self.insert_use_config(), insert_use: self.insert_use_config(),
prefer_no_std: self.data.imports_prefer_no_std, prefer_no_std: self.data.imports_preferNoStd,
prefer_prelude: self.data.imports_preferPrelude,
} }
} }
@ -1486,7 +1489,8 @@ impl Config {
CallableCompletionDef::None => None, CallableCompletionDef::None => None,
}, },
insert_use: self.insert_use_config(), insert_use: self.insert_use_config(),
prefer_no_std: self.data.imports_prefer_no_std, prefer_no_std: self.data.imports_preferNoStd,
prefer_prelude: self.data.imports_preferPrelude,
snippet_cap: SnippetCap::new(try_or_def!( snippet_cap: SnippetCap::new(try_or_def!(
self.caps self.caps
.text_document .text_document
@ -1515,7 +1519,8 @@ impl Config {
snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")), snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")),
allowed: None, allowed: None,
insert_use: self.insert_use_config(), insert_use: self.insert_use_config(),
prefer_no_std: self.data.imports_prefer_no_std, prefer_no_std: self.data.imports_preferNoStd,
prefer_prelude: self.data.imports_preferPrelude,
assist_emit_must_use: self.data.assist_emitMustUse, assist_emit_must_use: self.data.assist_emitMustUse,
} }
} }

View file

@ -146,6 +146,7 @@ fn integrated_completion_benchmark() {
}, },
snippets: Vec::new(), snippets: Vec::new(),
prefer_no_std: false, prefer_no_std: false,
prefer_prelude: true,
limit: None, limit: None,
}; };
let position = let position =
@ -186,6 +187,7 @@ fn integrated_completion_benchmark() {
}, },
snippets: Vec::new(), snippets: Vec::new(),
prefer_no_std: false, prefer_no_std: false,
prefer_prelude: true,
limit: None, limit: None,
}; };
let position = let position =

View file

@ -44,7 +44,7 @@
//! panic: fmt //! panic: fmt
//! phantom_data: //! phantom_data:
//! pin: //! pin:
//! pointee: //! pointee: copy, send, sync, ord, hash, unpin
//! range: //! range:
//! result: //! result:
//! send: sized //! send: sized
@ -54,6 +54,7 @@
//! sync: sized //! sync: sized
//! transmute: //! transmute:
//! try: infallible //! try: infallible
//! unpin: sized
//! unsize: sized //! unsize: sized
#![rustc_coherence_is_core] #![rustc_coherence_is_core]
@ -89,6 +90,11 @@ pub mod marker {
pub trait Unsize<T: ?Sized> {} pub trait Unsize<T: ?Sized> {}
// endregion:unsize // endregion:unsize
// region:unpin
#[lang = "unpin"]
pub auto trait Unpin {}
// endregion:unpin
// region:copy // region:copy
#[lang = "copy"] #[lang = "copy"]
pub trait Copy: Clone {} pub trait Copy: Clone {}
@ -387,9 +393,10 @@ pub mod ptr {
// region:pointee // region:pointee
#[lang = "pointee_trait"] #[lang = "pointee_trait"]
#[rustc_deny_explicit_impl(implement_via_object = false)]
pub trait Pointee { pub trait Pointee {
#[lang = "metadata_type"] #[lang = "metadata_type"]
type Metadata; type Metadata: Copy + Send + Sync + Ord + Hash + Unpin;
} }
// endregion:pointee // endregion:pointee
// region:non_null // region:non_null

View file

@ -57,8 +57,9 @@ export interface TextDocumentEdit {
} }
``` ```
When applying such code action or text edit, the editor should insert snippet, with tab stops and placeholder. When applying such code action or text edit, the editor should insert snippet, with tab stops and placeholders.
At the moment, rust-analyzer guarantees that only a single edit will have `InsertTextFormat.Snippet`. At the moment, rust-analyzer guarantees that only a single `TextDocumentEdit` will have edits which can be `InsertTextFormat.Snippet`.
Any additional `TextDocumentEdit`s will only have edits which are `InsertTextFormat.PlainText`.
### Example ### Example

View file

@ -493,11 +493,16 @@ Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-i
-- --
Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`. Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
-- --
[[rust-analyzer.imports.prefer.no.std]]rust-analyzer.imports.prefer.no.std (default: `false`):: [[rust-analyzer.imports.preferNoStd]]rust-analyzer.imports.preferNoStd (default: `false`)::
+ +
-- --
Prefer to unconditionally use imports of the core and alloc crate, over the std crate. Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
-- --
[[rust-analyzer.imports.preferPrelude]]rust-analyzer.imports.preferPrelude (default: `false`)::
+
--
Whether to prefer import paths containing a `prelude` module.
--
[[rust-analyzer.imports.prefix]]rust-analyzer.imports.prefix (default: `"plain"`):: [[rust-analyzer.imports.prefix]]rust-analyzer.imports.prefix (default: `"plain"`)::
+ +
-- --

View file

@ -946,7 +946,7 @@ Or it is possible to specify vars more granularly:
"rust-analyzer.runnables.extraEnv": [ "rust-analyzer.runnables.extraEnv": [
{ {
// "mask": null, // null mask means that this rule will be applied for all runnables // "mask": null, // null mask means that this rule will be applied for all runnables
env: { "env": {
"APP_ID": "1", "APP_ID": "1",
"APP_DATA": "asdf" "APP_DATA": "asdf"
} }
@ -968,7 +968,7 @@ If needed, you can set different values for different platforms:
"rust-analyzer.runnables.extraEnv": [ "rust-analyzer.runnables.extraEnv": [
{ {
"platform": "win32", // windows only "platform": "win32", // windows only
env: { "env": {
"APP_DATA": "windows specific data" "APP_DATA": "windows specific data"
} }
}, },

View file

@ -156,7 +156,7 @@
}, },
{ {
"command": "rust-analyzer.expandMacro", "command": "rust-analyzer.expandMacro",
"title": "Expand macro recursively", "title": "Expand macro recursively at caret",
"category": "rust-analyzer" "category": "rust-analyzer"
}, },
{ {
@ -1129,11 +1129,16 @@
"default": true, "default": true,
"type": "boolean" "type": "boolean"
}, },
"rust-analyzer.imports.prefer.no.std": { "rust-analyzer.imports.preferNoStd": {
"markdownDescription": "Prefer to unconditionally use imports of the core and alloc crate, over the std crate.", "markdownDescription": "Prefer to unconditionally use imports of the core and alloc crate, over the std crate.",
"default": false, "default": false,
"type": "boolean" "type": "boolean"
}, },
"rust-analyzer.imports.preferPrelude": {
"markdownDescription": "Whether to prefer import paths containing a `prelude` module.",
"default": false,
"type": "boolean"
},
"rust-analyzer.imports.prefix": { "rust-analyzer.imports.prefix": {
"markdownDescription": "The path structure for newly inserted paths to use.", "markdownDescription": "The path structure for newly inserted paths to use.",
"default": "plain", "default": "plain",