diff --git a/Cargo.lock b/Cargo.lock index c7d110eafb..6670e92f51 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -569,6 +569,7 @@ dependencies = [ "expect-test", "hir-def", "hir-expand", + "indexmap", "intern", "itertools", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/Cargo.toml b/Cargo.toml index 7054020086..4ee8064b5e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] members = ["xtask/", "lib/*", "crates/*"] -exclude = ["crates/proc-macro-srv/proc-macro-test/"] +exclude = ["crates/proc-macro-srv/proc-macro-test/imp"] resolver = "2" [workspace.package] @@ -138,4 +138,4 @@ dashmap = { version = "=5.5.3", features = ["raw-api"] } collapsible_if = "allow" needless_pass_by_value = "allow" nonminimal_bool = "allow" -redundant_pattern_matching = "allow" \ No newline at end of file +redundant_pattern_matching = "allow" diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index a45ec844ab..c728570d98 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -17,7 +17,7 @@ use smallvec::SmallVec; use syntax::{ ast::{ self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName, - SlicePatComponents, + RangeItem, SlicePatComponents, }, AstNode, AstPtr, SyntaxNodePtr, }; @@ -622,7 +622,8 @@ impl ExprCollector<'_> { ast::Expr::IndexExpr(e) => { let base = self.collect_expr_opt(e.base()); let index = self.collect_expr_opt(e.index()); - self.alloc_expr(Expr::Index { base, index }, syntax_ptr) + let is_assignee_expr = self.is_lowering_assignee_expr; + self.alloc_expr(Expr::Index { base, index, is_assignee_expr }, syntax_ptr) } ast::Expr::RangeExpr(e) => { let lhs = e.start().map(|lhs| self.collect_expr(lhs)); @@ -1609,7 +1610,7 @@ impl ExprCollector<'_> { |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))), |name, span| { if let Some(span) = span { - mappings.push((span, name.clone())) + mappings.push((span, name)) } }, ), diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 6ecf1c20d6..02b19ade44 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -376,7 +376,7 @@ impl Printer<'_> { w!(self, ") "); } } - Expr::Index { base, index } => { + Expr::Index { base, index, is_assignee_expr: _ } => { self.print_expr(*base); w!(self, "["); self.print_expr(*index); diff --git a/crates/hir-def/src/data/adt.rs b/crates/hir-def/src/data/adt.rs index b163112db9..a95b78614e 100644 --- a/crates/hir-def/src/data/adt.rs +++ b/crates/hir-def/src/data/adt.rs @@ -128,7 +128,7 @@ fn parse_repr_tt(tt: &Subtree) -> Option { } else { 0 }; - let pack = Align::from_bytes(pack).unwrap(); + let pack = Align::from_bytes(pack).unwrap_or(Align::ONE); min_pack = Some(if let Some(min_pack) = min_pack { min_pack.min(pack) } else { pack }); ReprFlags::empty() diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs index 591ee77c70..5890e818c4 100644 --- a/crates/hir-def/src/hir.rs +++ b/crates/hir-def/src/hir.rs @@ -265,6 +265,7 @@ pub enum Expr { Index { base: ExprId, index: ExprId, + is_assignee_expr: bool, }, Closure { args: Box<[PatId]>, @@ -432,7 +433,7 @@ impl Expr { f(rhs); } } - Expr::Index { base, index } => { + Expr::Index { base, index, .. 
} => { f(*base); f(*index); } diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs index aea7229bd6..989bbc7bfb 100644 --- a/crates/hir-def/src/import_map.rs +++ b/crates/hir-def/src/import_map.rs @@ -3,13 +3,13 @@ use std::{fmt, hash::BuildHasherDefault}; use base_db::CrateId; -use fst::{self, raw::IndexedValue, Streamer}; +use fst::{self, raw::IndexedValue, Automaton, Streamer}; use hir_expand::name::Name; use indexmap::IndexMap; use itertools::Itertools; use rustc_hash::{FxHashSet, FxHasher}; use smallvec::SmallVec; -use stdx::format_to; +use stdx::{format_to, TupleExt}; use triomphe::Arc; use crate::{ @@ -20,12 +20,10 @@ use crate::{ AssocItemId, ModuleDefId, ModuleId, TraitId, }; -type FxIndexMap = IndexMap>; - /// Item import details stored in the `ImportMap`. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct ImportInfo { - /// A name that can be used to import the item, relative to the crate's root. + /// A name that can be used to import the item, relative to the container. pub name: Name, /// The module containing this item. pub container: ModuleId, @@ -35,22 +33,22 @@ pub struct ImportInfo { pub is_unstable: bool, } -type ImportMapIndex = FxIndexMap, IsTraitAssocItem)>; - /// A map from publicly exported items to its name. /// -/// Reexports of items are taken into account, ie. if something is exported under multiple -/// names, the one with the shortest import path will be used. +/// Reexports of items are taken into account. #[derive(Default)] pub struct ImportMap { - map: ImportMapIndex, - /// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the - /// values returned by running `fst`. + /// Maps from `ItemInNs` to information of imports that bring the item into scope. + item_to_info_map: ImportMapIndex, + /// List of keys stored in [`Self::item_to_info_map`], sorted lexicographically by their + /// [`Name`]. Indexed by the values returned by running `fst`. /// - /// Since a name can refer to multiple items due to namespacing, we store all items with the - /// same name right after each other. This allows us to find all items after the FST gives us - /// the index of the first one. - importables: Vec, + /// Since a name can refer to multiple items due to namespacing and import aliases, we store all + /// items with the same name right after each other. This allows us to find all items after the + /// fst gives us the index of the first one. + /// + /// The [`u32`] is the index into the smallvec in the value of [`Self::item_to_info_map`]. + importables: Vec<(ItemInNs, u32)>, fst: fst::Map>, } @@ -60,10 +58,13 @@ enum IsTraitAssocItem { No, } +type FxIndexMap = IndexMap>; +type ImportMapIndex = FxIndexMap, IsTraitAssocItem)>; + impl ImportMap { pub fn dump(&self, db: &dyn DefDatabase) -> String { let mut out = String::new(); - for (k, v) in self.map.iter() { + for (k, v) in self.item_to_info_map.iter() { format_to!(out, "{:?} ({:?}) -> ", k, v.1); for v in &v.0 { format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container); @@ -76,177 +77,191 @@ impl ImportMap { pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc { let _p = profile::span("import_map_query"); - let map = collect_import_map(db, krate); + let map = Self::collect_import_map(db, krate); let mut importables: Vec<_> = map .iter() - // We've only collected items, whose name cannot be tuple field. 
- .flat_map(|(&item, (info, is_assoc))| { - info.iter().map(move |info| { - (item, *is_assoc, info.name.as_str().unwrap().to_ascii_lowercase()) - }) + // We've only collected items, whose name cannot be tuple field so unwrapping is fine. + .flat_map(|(&item, (info, _))| { + info.iter() + .enumerate() + .map(move |(idx, info)| (item, info.name.to_smol_str(), idx as u32)) }) .collect(); - importables.sort_by(|(_, l_is_assoc, lhs_name), (_, r_is_assoc, rhs_name)| { - lhs_name.cmp(rhs_name).then_with(|| l_is_assoc.cmp(r_is_assoc)) + importables.sort_by(|(_, l_info, _), (_, r_info, _)| { + let lhs_chars = l_info.chars().map(|c| c.to_ascii_lowercase()); + let rhs_chars = r_info.chars().map(|c| c.to_ascii_lowercase()); + lhs_chars.cmp(rhs_chars) }); importables.dedup(); // Build the FST, taking care not to insert duplicate values. let mut builder = fst::MapBuilder::memory(); - let iter = importables + let mut iter = importables .iter() .enumerate() - .dedup_by(|(_, (_, _, lhs)), (_, (_, _, rhs))| lhs == rhs); - for (start_idx, (_, _, name)) in iter { - let _ = builder.insert(name, start_idx as u64); + .dedup_by(|&(_, (_, lhs, _)), &(_, (_, rhs, _))| lhs.eq_ignore_ascii_case(rhs)); + + let mut insert = |name: &str, start, end| { + builder.insert(name.to_ascii_lowercase(), ((start as u64) << 32) | end as u64).unwrap() + }; + + if let Some((mut last, (_, name, _))) = iter.next() { + debug_assert_eq!(last, 0); + let mut last_name = name; + for (next, (_, next_name, _)) in iter { + insert(last_name, last, next); + last = next; + last_name = next_name; + } + insert(last_name, last, importables.len()); } - Arc::new(ImportMap { - map, - fst: builder.into_map(), - importables: importables.into_iter().map(|(item, _, _)| item).collect(), - }) + let importables = importables.into_iter().map(|(item, _, idx)| (item, idx)).collect(); + Arc::new(ImportMap { item_to_info_map: map, fst: builder.into_map(), importables }) } pub fn import_info_for(&self, item: ItemInNs) -> Option<&[ImportInfo]> { - self.map.get(&item).map(|(info, _)| &**info) + self.item_to_info_map.get(&item).map(|(info, _)| &**info) } -} -fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex { - let _p = profile::span("collect_import_map"); + fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex { + let _p = profile::span("collect_import_map"); - let def_map = db.crate_def_map(krate); - let mut map = FxIndexMap::default(); + let def_map = db.crate_def_map(krate); + let mut map = FxIndexMap::default(); - // We look only into modules that are public(ly reexported), starting with the crate root. - let root = def_map.module_id(DefMap::ROOT); - let mut worklist = vec![root]; - let mut visited = FxHashSet::default(); + // We look only into modules that are public(ly reexported), starting with the crate root. + let root = def_map.module_id(DefMap::ROOT); + let mut worklist = vec![root]; + let mut visited = FxHashSet::default(); - while let Some(module) = worklist.pop() { - if !visited.insert(module) { - continue; - } - let ext_def_map; - let mod_data = if module.krate == krate { - &def_map[module.local_id] - } else { - // The crate might reexport a module defined in another crate. 
- ext_def_map = module.def_map(db); - &ext_def_map[module.local_id] - }; - - let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| { - let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public); - if per_ns.is_none() { None } else { Some((name, per_ns)) } - }); - - for (name, per_ns) in visible_items { - for (item, import) in per_ns.iter_items() { - let attr_id = if let Some(import) = import { - match import { - ImportOrExternCrate::ExternCrate(id) => Some(id.into()), - ImportOrExternCrate::Import(id) => Some(id.import.into()), - } - } else { - match item { - ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(), - ItemInNs::Macros(id) => Some(id.into()), - } - }; - let (is_doc_hidden, is_unstable) = attr_id.map_or((false, false), |attr_id| { - let attrs = db.attrs(attr_id); - (attrs.has_doc_hidden(), attrs.is_unstable()) - }); - - let import_info = ImportInfo { - name: name.clone(), - container: module, - is_doc_hidden, - is_unstable, - }; - - if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() { - collect_trait_assoc_items( - db, - &mut map, - tr, - matches!(item, ItemInNs::Types(_)), - &import_info, - ); - } - - let (infos, _) = - map.entry(item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::No)); - infos.reserve_exact(1); - infos.push(import_info); - - // If we've just added a module, descend into it. - if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() { - worklist.push(mod_id); - } - } - } - } - map.shrink_to_fit(); - map -} - -fn collect_trait_assoc_items( - db: &dyn DefDatabase, - map: &mut ImportMapIndex, - tr: TraitId, - is_type_in_ns: bool, - trait_import_info: &ImportInfo, -) { - let _p = profile::span("collect_trait_assoc_items"); - for &(ref assoc_item_name, item) in &db.trait_data(tr).items { - let module_def_id = match item { - AssocItemId::FunctionId(f) => ModuleDefId::from(f), - AssocItemId::ConstId(c) => ModuleDefId::from(c), - // cannot use associated type aliases directly: need a `::TypeAlias` - // qualifier, ergo no need to store it for imports in import_map - AssocItemId::TypeAliasId(_) => { - cov_mark::hit!(type_aliases_ignored); + while let Some(module) = worklist.pop() { + if !visited.insert(module) { continue; } - }; - let assoc_item = if is_type_in_ns { - ItemInNs::Types(module_def_id) - } else { - ItemInNs::Values(module_def_id) - }; + let ext_def_map; + let mod_data = if module.krate == krate { + &def_map[module.local_id] + } else { + // The crate might reexport a module defined in another crate. 
+ ext_def_map = module.def_map(db); + &ext_def_map[module.local_id] + }; - let attrs = &db.attrs(item.into()); - let assoc_item_info = ImportInfo { - container: trait_import_info.container, - name: assoc_item_name.clone(), - is_doc_hidden: attrs.has_doc_hidden(), - is_unstable: attrs.is_unstable(), - }; + let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| { + let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public); + if per_ns.is_none() { + None + } else { + Some((name, per_ns)) + } + }); - let (infos, _) = - map.entry(assoc_item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::Yes)); - infos.reserve_exact(1); - infos.push(assoc_item_info); + for (name, per_ns) in visible_items { + for (item, import) in per_ns.iter_items() { + let attr_id = if let Some(import) = import { + match import { + ImportOrExternCrate::ExternCrate(id) => Some(id.into()), + ImportOrExternCrate::Import(id) => Some(id.import.into()), + } + } else { + match item { + ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(), + ItemInNs::Macros(id) => Some(id.into()), + } + }; + let (is_doc_hidden, is_unstable) = attr_id.map_or((false, false), |attr_id| { + let attrs = db.attrs(attr_id); + (attrs.has_doc_hidden(), attrs.is_unstable()) + }); + + let import_info = ImportInfo { + name: name.clone(), + container: module, + is_doc_hidden, + is_unstable, + }; + + if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() { + Self::collect_trait_assoc_items( + db, + &mut map, + tr, + matches!(item, ItemInNs::Types(_)), + &import_info, + ); + } + + let (infos, _) = + map.entry(item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::No)); + infos.reserve_exact(1); + infos.push(import_info); + + // If we've just added a module, descend into it. + if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() { + worklist.push(mod_id); + } + } + } + } + map.shrink_to_fit(); + map } -} -impl PartialEq for ImportMap { - fn eq(&self, other: &Self) -> bool { - // `fst` and `importables` are built from `map`, so we don't need to compare them. - self.map == other.map + fn collect_trait_assoc_items( + db: &dyn DefDatabase, + map: &mut ImportMapIndex, + tr: TraitId, + is_type_in_ns: bool, + trait_import_info: &ImportInfo, + ) { + let _p = profile::span("collect_trait_assoc_items"); + for &(ref assoc_item_name, item) in &db.trait_data(tr).items { + let module_def_id = match item { + AssocItemId::FunctionId(f) => ModuleDefId::from(f), + AssocItemId::ConstId(c) => ModuleDefId::from(c), + // cannot use associated type aliases directly: need a `::TypeAlias` + // qualifier, ergo no need to store it for imports in import_map + AssocItemId::TypeAliasId(_) => { + cov_mark::hit!(type_aliases_ignored); + continue; + } + }; + let assoc_item = if is_type_in_ns { + ItemInNs::Types(module_def_id) + } else { + ItemInNs::Values(module_def_id) + }; + + let attrs = &db.attrs(item.into()); + let assoc_item_info = ImportInfo { + container: trait_import_info.container, + name: assoc_item_name.clone(), + is_doc_hidden: attrs.has_doc_hidden(), + is_unstable: attrs.is_unstable(), + }; + + let (infos, _) = + map.entry(assoc_item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::Yes)); + infos.reserve_exact(1); + infos.push(assoc_item_info); + } } } impl Eq for ImportMap {} +impl PartialEq for ImportMap { + fn eq(&self, other: &Self) -> bool { + // `fst` and `importables` are built from `map`, so we don't need to compare them. 
+ self.item_to_info_map == other.item_to_info_map + } +} impl fmt::Debug for ImportMap { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut importable_names: Vec<_> = self - .map + .item_to_info_map .iter() .map(|(item, (infos, _))| { let l = infos.len(); @@ -264,8 +279,8 @@ impl fmt::Debug for ImportMap { } /// A way to match import map contents against the search query. -#[derive(Copy, Clone, Debug)] -enum SearchMode { +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum SearchMode { /// Import map entry should strictly match the query string. Exact, /// Import map entry should contain all letters from the query string, @@ -275,6 +290,42 @@ enum SearchMode { Prefix, } +impl SearchMode { + pub fn check(self, query: &str, case_sensitive: bool, candidate: &str) -> bool { + match self { + SearchMode::Exact if case_sensitive => candidate == query, + SearchMode::Exact => candidate.eq_ignore_ascii_case(&query), + SearchMode::Prefix => { + query.len() <= candidate.len() && { + let prefix = &candidate[..query.len() as usize]; + if case_sensitive { + prefix == query + } else { + prefix.eq_ignore_ascii_case(&query) + } + } + } + SearchMode::Fuzzy => { + let mut name = candidate; + query.chars().all(|query_char| { + let m = if case_sensitive { + name.match_indices(query_char).next() + } else { + name.match_indices([query_char, query_char.to_ascii_uppercase()]).next() + }; + match m { + Some((index, _)) => { + name = &name[index + 1..]; + true + } + None => false, + } + }) + } + } + } +} + /// Three possible ways to search for the name in associated and/or other items. #[derive(Debug, Clone, Copy)] pub enum AssocSearchMode { @@ -293,7 +344,6 @@ pub struct Query { search_mode: SearchMode, assoc_mode: AssocSearchMode, case_sensitive: bool, - limit: usize, } impl Query { @@ -305,7 +355,6 @@ impl Query { search_mode: SearchMode::Exact, assoc_mode: AssocSearchMode::Include, case_sensitive: false, - limit: usize::MAX, } } @@ -327,11 +376,6 @@ impl Query { Self { assoc_mode, ..self } } - /// Limits the returned number of items to `limit`. - pub fn limit(self, limit: usize) -> Self { - Self { limit, ..self } - } - /// Respect casing of the query string when matching. pub fn case_sensitive(self) -> Self { Self { case_sensitive: true, ..self } @@ -344,39 +388,6 @@ impl Query { _ => true, } } - - /// Checks whether the import map entry matches the query. - fn import_matches(&self, import: &ImportInfo, enforce_lowercase: bool) -> bool { - let _p = profile::span("import_map::Query::import_matches"); - - // FIXME: Can we get rid of the alloc here? - let input = import.name.to_smol_str(); - let mut _s_slot; - let case_insensitive = enforce_lowercase || !self.case_sensitive; - let input = if case_insensitive { - _s_slot = String::from(input); - _s_slot.make_ascii_lowercase(); - &*_s_slot - } else { - &*input - }; - - let query_string = if case_insensitive { &self.lowercased } else { &self.query }; - - match self.search_mode { - SearchMode::Exact => input == *query_string, - SearchMode::Prefix => input.starts_with(query_string), - SearchMode::Fuzzy => { - let mut input_chars = input.chars(); - for query_char in query_string.chars() { - if !input_chars.any(|it| it == query_char) { - return false; - } - } - true - } - } - } } /// Searches dependencies of `krate` for an importable name matching `query`. 
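// Illustrative sketch, not part of the patch: the fst value layout used by the
// rewritten `ImportMap`. Each lowercased name maps to a single u64 that packs the
// half-open range `[start, end)` of `importables` entries sharing that name,
// matching the `((start as u64) << 32) | end as u64` insert above and the
// `value >> 32` / `value & 0xFFFF_FFFF` decode in `search_maps` below. The helper
// names `pack_range` and `unpack_range` are invented for this example.
fn pack_range(start: usize, end: usize) -> u64 {
    ((start as u64) << 32) | end as u64
}

fn unpack_range(value: u64) -> (usize, usize) {
    ((value >> 32) as usize, (value & 0xFFFF_FFFF) as usize)
}

fn main() {
    // Entries 3..7 of `importables` share one name; one fst value covers them all.
    let value = pack_range(3, 7);
    assert_eq!(unpack_range(value), (3, 7));
}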
@@ -394,73 +405,66 @@ pub fn search_dependencies( let import_maps: Vec<_> = graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect(); - let automaton = fst::automaton::Subsequence::new(&query.lowercased); - let mut op = fst::map::OpBuilder::new(); - for map in &import_maps { - op = op.add(map.fst.search(&automaton)); + + match query.search_mode { + SearchMode::Exact => { + let automaton = fst::automaton::Str::new(&query.lowercased); + + for map in &import_maps { + op = op.add(map.fst.search(&automaton)); + } + search_maps(&import_maps, op.union(), query) + } + SearchMode::Fuzzy => { + let automaton = fst::automaton::Subsequence::new(&query.lowercased); + + for map in &import_maps { + op = op.add(map.fst.search(&automaton)); + } + search_maps(&import_maps, op.union(), query) + } + SearchMode::Prefix => { + let automaton = fst::automaton::Str::new(&query.lowercased).starts_with(); + + for map in &import_maps { + op = op.add(map.fst.search(&automaton)); + } + search_maps(&import_maps, op.union(), query) + } } +} - let mut stream = op.union(); - +fn search_maps( + import_maps: &[Arc], + mut stream: fst::map::Union<'_>, + query: &Query, +) -> FxHashSet { let mut res = FxHashSet::default(); - let mut common_importable_data_scratch = vec![]; - // FIXME: Improve this, its rather unreadable and does duplicate amount of work while let Some((_, indexed_values)) = stream.next() { - for &IndexedValue { index, value } in indexed_values { - let import_map = &import_maps[index]; - let importables @ [importable, ..] = &import_map.importables[value as usize..] else { - continue; - }; - let &(ref importable_data, is_trait_assoc_item) = &import_map.map[importable]; - if !query.matches_assoc_mode(is_trait_assoc_item) { - continue; - } + for &IndexedValue { index: import_map_idx, value } in indexed_values { + let end = (value & 0xFFFF_FFFF) as usize; + let start = (value >> 32) as usize; + let ImportMap { item_to_info_map, importables, .. } = &*import_maps[import_map_idx]; + let importables = &importables[start as usize..end]; - // Fetch all the known names of this importable item (to handle import aliases/renames) - common_importable_data_scratch.extend( - importable_data - .iter() - .filter(|&info| query.import_matches(info, true)) - // Name shared by the importable items in this group. - .map(|info| info.name.to_smol_str()), - ); - if common_importable_data_scratch.is_empty() { - continue; - } - common_importable_data_scratch.sort(); - common_importable_data_scratch.dedup(); - - let iter = - common_importable_data_scratch.drain(..).flat_map(|common_importable_name| { - // Add the items from this name group. Those are all subsequent items in - // `importables` whose name match `common_importable_name`. 
- - importables - .iter() - .copied() - .take_while(move |item| { - let &(ref import_infos, assoc_mode) = &import_map.map[item]; - query.matches_assoc_mode(assoc_mode) - && import_infos.iter().any(|info| { - info.name - .to_smol_str() - .eq_ignore_ascii_case(&common_importable_name) - }) - }) - .filter(move |item| { - !query.case_sensitive || { - // we've already checked the common importables name case-insensitively - let &(ref import_infos, _) = &import_map.map[item]; - import_infos.iter().any(|info| query.import_matches(info, false)) - } - }) + let iter = importables + .iter() + .copied() + .filter_map(|(item, info_idx)| { + let (import_infos, assoc_mode) = &item_to_info_map[&item]; + query + .matches_assoc_mode(*assoc_mode) + .then(|| (item, &import_infos[info_idx as usize])) + }) + .filter(|&(_, info)| { + query.search_mode.check( + &query.query, + query.case_sensitive, + &info.name.to_smol_str(), + ) }); - res.extend(iter); - - if res.len() >= query.limit { - return res; - } + res.extend(iter.map(TupleExt::head)); } } @@ -480,7 +484,7 @@ mod tests { impl ImportMap { fn fmt_for_test(&self, db: &dyn DefDatabase) -> String { let mut importable_paths: Vec<_> = self - .map + .item_to_info_map .iter() .flat_map(|(item, (info, _))| info.iter().map(move |info| (item, info))) .map(|(item, info)| { @@ -907,28 +911,28 @@ mod tests { #[test] fn search_mode() { let ra_fixture = r#" - //- /main.rs crate:main deps:dep - //- /dep.rs crate:dep deps:tdep - use tdep::fmt as fmt_dep; - pub mod fmt { - pub trait Display { - fn fmt(); - } - } - #[macro_export] - macro_rules! Fmt { - () => {}; - } - pub struct Fmt; +//- /main.rs crate:main deps:dep +//- /dep.rs crate:dep deps:tdep +use tdep::fmt as fmt_dep; +pub mod fmt { + pub trait Display { + fn fmt(); + } +} +#[macro_export] +macro_rules! Fmt { + () => {}; +} +pub struct Fmt; - pub fn format() {} - pub fn no() {} +pub fn format() {} +pub fn no() {} - //- /tdep.rs crate:tdep - pub mod fmt { - pub struct NotImportableFromMain; - } - "#; +//- /tdep.rs crate:tdep +pub mod fmt { + pub struct NotImportableFromMain; +} +"#; check_search( ra_fixture, @@ -996,19 +1000,6 @@ mod tests { dep::fmt::Display::fmt (a) "#]], ); - - check_search( - ra_fixture, - "main", - Query::new("fmt".to_string()), - expect![[r#" - dep::Fmt (m) - dep::Fmt (t) - dep::Fmt (v) - dep::fmt (t) - dep::fmt::Display::fmt (a) - "#]], - ); } #[test] @@ -1043,32 +1034,4 @@ mod tests { "#]], ); } - - #[test] - fn search_limit() { - check_search( - r#" - //- /main.rs crate:main deps:dep - //- /dep.rs crate:dep - pub mod fmt { - pub trait Display { - fn fmt(); - } - } - #[macro_export] - macro_rules! 
Fmt { - () => {}; - } - pub struct Fmt; - - pub fn format() {} - pub fn no() {} - "#, - "main", - Query::new("".to_string()).fuzzy().limit(1), - expect![[r#" - dep::fmt::Display (t) - "#]], - ); - } } diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 22ba3aab4e..250d7b677b 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -307,6 +307,15 @@ pub struct FieldId { pub type LocalFieldId = Idx; +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TupleId(pub u32); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TupleFieldId { + pub tuple: TupleId, + pub index: u32, +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct ConstId(salsa::InternId); type ConstLoc = AssocItemLoc; diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 3763bfcbcf..a18ac4b28c 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -1397,7 +1397,7 @@ impl DefCollector<'_> { always!(krate == loc.def.krate); DefDiagnostic::unresolved_proc_macro(module_id, loc.kind.clone(), loc.def.krate) } - _ => DefDiagnostic::macro_error(module_id, loc.kind.clone(), err.to_string()), + _ => DefDiagnostic::macro_error(module_id, loc.kind, err.to_string()), }; self.def_map.diagnostics.push(diag); diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index f7a26e436d..ed04582cb0 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -390,7 +390,13 @@ fn parse_macro_expansion( let expand_to = loc.expand_to(); let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc); - let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to); + let (parse, rev_token_map) = token_tree_to_syntax_node( + match &tt { + CowArc::Arc(it) => it, + CowArc::Owned(it) => it, + }, + expand_to, + ); ExpandResult { value: (parse, Arc::new(rev_token_map)), err } } @@ -669,15 +675,20 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { } } +enum CowArc { + Arc(Arc), + Owned(T), +} + fn macro_expand( db: &dyn ExpandDatabase, macro_call_id: MacroCallId, loc: MacroCallLoc, -) -> ExpandResult> { +) -> ExpandResult> { let _p = profile::span("macro_expand"); let ExpandResult { value: tt, mut err } = match loc.def.kind { - MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id), + MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc), MacroDefKind::BuiltInDerive(expander, ..) => { let (root, map) = parse_with_map(db, loc.kind.file_id()); let root = root.syntax_node(); @@ -692,7 +703,7 @@ fn macro_expand( let ValueResult { value, err } = db.macro_arg(macro_call_id); let Some((macro_arg, undo_info)) = value else { return ExpandResult { - value: Arc::new(tt::Subtree { + value: CowArc::Owned(tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(loc.call_site), token_trees: Vec::new(), }), @@ -718,7 +729,7 @@ fn macro_expand( // As such we just return the input subtree here. MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => { return ExpandResult { - value: macro_arg.clone(), + value: CowArc::Arc(macro_arg.clone()), err: err.map(|err| { let mut buf = String::new(); for err in &**err { @@ -752,12 +763,17 @@ fn macro_expand( // Skip checking token tree limit for include! 
macro call if !loc.def.is_include() { // Set a hard limit for the expanded tt - if let Err(value) = check_tt_count(&tt, loc.call_site) { - return value; + if let Err(value) = check_tt_count(&tt) { + return value.map(|()| { + CowArc::Owned(tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(loc.call_site), + token_trees: vec![], + }) + }); } } - ExpandResult { value: Arc::new(tt), err } + ExpandResult { value: CowArc::Owned(tt), err } } fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult> { @@ -796,8 +812,13 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult Result<(), ExpandResult>> { +fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> { let count = tt.count(); if TOKEN_LIMIT.check(count).is_err() { Err(ExpandResult { - value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(call_site), - token_trees: vec![], - }), + value: (), err: Some(ExpandError::other(format!( "macro invocation exceeds token limit: produced {} tokens, limit is {}", count, diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index b5197d4c25..6a122e0859 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -220,6 +220,8 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, + // FIXME: This is being interned, subtrees can very quickly differ just slightly causing + // leakage problems here attr_args: Option>, /// Syntactical index of the invoking `#[attribute]`. /// diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index 1873e7bfe6..c7807bcf9a 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -32,6 +32,7 @@ once_cell = "1.17.0" triomphe.workspace = true nohash-hasher.workspace = true typed-arena = "2.0.1" +indexmap.workspace = true rustc-dependencies.workspace = true @@ -60,4 +61,4 @@ test-fixture.workspace = true in-rust-tree = ["rustc-dependencies/in-rust-tree"] [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs index 9792d945eb..5528ad3ab4 100644 --- a/crates/hir-ty/src/consteval.rs +++ b/crates/hir-ty/src/consteval.rs @@ -142,15 +142,15 @@ pub fn intern_const_ref( LiteralConstRef::Int(i) => { // FIXME: We should handle failure of layout better. 
let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16); - ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default()) + ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default()) } LiteralConstRef::UInt(i) => { let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16); - ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default()) + ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default()) } - LiteralConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()), + LiteralConstRef::Bool(b) => ConstScalar::Bytes(Box::new([*b as u8]), MemoryMap::default()), LiteralConstRef::Char(c) => { - ConstScalar::Bytes((*c as u32).to_le_bytes().to_vec(), MemoryMap::default()) + ConstScalar::Bytes((*c as u32).to_le_bytes().into(), MemoryMap::default()) } LiteralConstRef::Unknown => ConstScalar::Unknown, }; diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index d81926f7c9..23d9515422 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -515,7 +515,7 @@ fn render_const_scalar( TyKind::Dyn(_) => { let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap()); let ty_id = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap()); - let Ok(t) = memory_map.vtable.ty(ty_id) else { + let Ok(t) = memory_map.vtable_ty(ty_id) else { return f.write_str(""); }; let Ok(layout) = f.db.layout_of_ty(t.clone(), trait_env) else { @@ -609,7 +609,7 @@ fn render_const_scalar( } hir_def::AdtId::EnumId(e) => { let Some((var_id, var_layout)) = - detect_variant_from_bytes(&layout, f.db, trait_env.clone(), b, e) + detect_variant_from_bytes(&layout, f.db, trait_env, b, e) else { return f.write_str(""); }; diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 8053300ad2..a78e3e7dc2 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -41,9 +41,10 @@ use hir_def::{ resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, type_ref::TypeRef, AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, ItemContainerId, Lookup, - TraitId, TypeAliasId, VariantId, + TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId, }; use hir_expand::name::{name, Name}; +use indexmap::IndexSet; use la_arena::{ArenaMap, Entry}; use rustc_hash::{FxHashMap, FxHashSet}; use stdx::{always, never}; @@ -403,11 +404,15 @@ pub struct InferenceResult { /// For each method call expr, records the function it resolves to. method_resolutions: FxHashMap, /// For each field access expr, records the field it resolves to. - field_resolutions: FxHashMap, + field_resolutions: FxHashMap>, /// For each struct literal or pattern, records the variant it resolves to. variant_resolutions: FxHashMap, /// For each associated item record what it resolves to assoc_resolutions: FxHashMap, + /// Whenever a tuple field expression access a tuple field, we allocate a tuple id in + /// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of + /// that which allows us to resolve a [`TupleFieldId`]s type. + pub tuple_field_access_types: FxHashMap, pub diagnostics: Vec, pub type_of_expr: ArenaMap, /// For each pattern record the type it resolves to. 
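// Illustrative sketch, not part of the patch: how the new `TupleId`s are handed
// out. During inference, every distinct tuple substitution seen in a field access
// goes into an `IndexSet`; `insert_full` returns a stable index (reused for
// duplicates) which becomes `TupleId(index)`, and `resolve_all` below enumerates
// the set to build `tuple_field_access_types`. `Subst` here is a stand-in for the
// real `chalk_ir` substitution type.
use indexmap::IndexSet;

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Subst(Vec<&'static str>);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TupleId(u32);

fn allocate_tuple_id(accesses: &mut IndexSet<Subst>, subst: Subst) -> TupleId {
    // `insert_full` returns (index, newly_inserted); repeated substitutions reuse
    // the same index, so equal tuple types share one `TupleId`.
    let (idx, _newly_inserted) = accesses.insert_full(subst);
    TupleId(idx as u32)
}

fn main() {
    let mut accesses = IndexSet::new();
    let a = allocate_tuple_id(&mut accesses, Subst(vec!["i32", "bool"]));
    let b = allocate_tuple_id(&mut accesses, Subst(vec!["i32", "bool"]));
    assert_eq!(a, b);
}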
@@ -447,7 +452,7 @@ impl InferenceResult { pub fn method_resolution(&self, expr: ExprId) -> Option<(FunctionId, Substitution)> { self.method_resolutions.get(&expr).cloned() } - pub fn field_resolution(&self, expr: ExprId) -> Option { + pub fn field_resolution(&self, expr: ExprId) -> Option> { self.field_resolutions.get(&expr).copied() } pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option { @@ -517,6 +522,8 @@ pub(crate) struct InferenceContext<'a> { /// The traits in scope, disregarding block modules. This is used for caching purposes. traits_in_scope: FxHashSet, pub(crate) result: InferenceResult, + tuple_field_accesses_rev: + IndexSet>, /// The return type of the function being inferred, the closure or async block if we're /// currently within one. /// @@ -598,6 +605,7 @@ impl<'a> InferenceContext<'a> { InferenceContext { result: InferenceResult::default(), table: unify::InferenceTable::new(db, trait_env), + tuple_field_accesses_rev: Default::default(), return_ty: TyKind::Error.intern(Interner), // set in collect_* calls resume_yield_tys: None, return_coercion: None, @@ -621,7 +629,13 @@ impl<'a> InferenceContext<'a> { // used this function for another workaround, mention it here. If you really need this function and believe that // there is no problem in it being `pub(crate)`, remove this comment. pub(crate) fn resolve_all(self) -> InferenceResult { - let InferenceContext { mut table, mut result, deferred_cast_checks, .. } = self; + let InferenceContext { + mut table, + mut result, + deferred_cast_checks, + tuple_field_accesses_rev, + .. + } = self; // Destructure every single field so whenever new fields are added to `InferenceResult` we // don't forget to handle them here. let InferenceResult { @@ -645,6 +659,7 @@ impl<'a> InferenceContext<'a> { // to resolve them here. closure_info: _, mutated_bindings_in_closure: _, + tuple_field_access_types: _, } = &mut result; table.fallback_if_possible(); @@ -720,6 +735,11 @@ impl<'a> InferenceContext<'a> { for adjustment in pat_adjustments.values_mut().flatten() { *adjustment = table.resolve_completely(adjustment.clone()); } + result.tuple_field_access_types = tuple_field_accesses_rev + .into_iter() + .enumerate() + .map(|(idx, subst)| (TupleId(idx as u32), table.resolve_completely(subst))) + .collect(); result } diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index af74df1032..118b9c0149 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs @@ -1,18 +1,19 @@ //! Inference of closure parameter types based on the closure's expected type. 
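// Illustrative sketch, not part of the patch: field resolutions and place
// projections now carry `either::Either` of a named struct field and a tuple
// field, replacing the old `Option<FieldId>` / `TupleOrClosureField` split, so
// consumers match both sides — as the `CapturedItem` display code further down in
// this file does. `NamedField` and `TupleIndex` are stand-ins for the real
// `FieldId` / `TupleFieldId`.
use either::Either;

struct NamedField(&'static str);
struct TupleIndex(u32);

fn render_field_access(base: &str, field: Either<NamedField, TupleIndex>) -> String {
    match field {
        // Named struct field, e.g. `captured.bar`.
        Either::Left(f) => format!("{base}.{}", f.0),
        // Tuple field, e.g. `captured.0`.
        Either::Right(i) => format!("{base}.{}", i.0),
    }
}

fn main() {
    assert_eq!(render_field_access("captured", Either::Left(NamedField("bar"))), "captured.bar");
    assert_eq!(render_field_access("captured", Either::Right(TupleIndex(0))), "captured.0");
}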
-use std::{cmp, collections::HashMap, convert::Infallible, mem}; +use std::{cmp, convert::Infallible, mem}; use chalk_ir::{ cast::Cast, fold::{FallibleTypeFolder, TypeFoldable}, AliasEq, AliasTy, BoundVar, DebruijnIndex, FnSubst, Mutability, TyKind, WhereClause, }; +use either::Either; use hir_def::{ data::adt::VariantData, hir::{Array, BinaryOp, BindingId, CaptureBy, Expr, ExprId, Pat, PatId, Statement, UnaryOp}, lang_item::LangItem, resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, - DefWithBodyId, FieldId, HasModule, VariantId, + DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId, }; use hir_expand::name; use rustc_hash::FxHashMap; @@ -129,7 +130,7 @@ impl HirPlace { ctx.owner.module(ctx.db.upcast()).krate(), ); } - ty.clone() + ty } fn capture_kind_of_truncated_place( @@ -186,7 +187,7 @@ impl CapturedItem { result = format!("*{result}"); field_need_paren = true; } - ProjectionElem::Field(f) => { + ProjectionElem::Field(Either::Left(f)) => { if field_need_paren { result = format!("({result})"); } @@ -207,7 +208,15 @@ impl CapturedItem { result = format!("{result}.{field}"); field_need_paren = false; } - &ProjectionElem::TupleOrClosureField(field) => { + ProjectionElem::Field(Either::Right(f)) => { + let field = f.index; + if field_need_paren { + result = format!("({result})"); + } + result = format!("{result}.{field}"); + field_need_paren = false; + } + &ProjectionElem::ClosureField(field) => { if field_need_paren { result = format!("({result})"); } @@ -236,7 +245,7 @@ pub(crate) struct CapturedItemWithoutTy { impl CapturedItemWithoutTy { fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem { - let ty = self.place.ty(ctx).clone(); + let ty = self.place.ty(ctx); let ty = match &self.kind { CaptureKind::ByValue => ty, CaptureKind::ByRef(bk) => { @@ -329,15 +338,10 @@ impl InferenceContext<'_> { } } } - Expr::Field { expr, name } => { + Expr::Field { expr, name: _ } => { let mut place = self.place_of_expr(*expr)?; - if let TyKind::Tuple(..) 
= self.expr_ty(*expr).kind(Interner) { - let index = name.as_tuple_index()?; - place.projections.push(ProjectionElem::TupleOrClosureField(index)) - } else { - let field = self.result.field_resolution(tgt_expr)?; - place.projections.push(ProjectionElem::Field(field)); - } + let field = self.result.field_resolution(tgt_expr)?; + place.projections.push(ProjectionElem::Field(field)); return Some(place); } Expr::UnaryOp { expr, op: UnaryOp::Deref } => { @@ -392,7 +396,7 @@ impl InferenceContext<'_> { fn consume_place(&mut self, place: HirPlace, span: MirSpan) { if self.is_upvar(&place) { - let ty = place.ty(self).clone(); + let ty = place.ty(self); let kind = if self.is_ty_copy(ty) { CaptureKind::ByRef(BorrowKind::Shared) } else { @@ -598,7 +602,7 @@ impl InferenceContext<'_> { self.consume_expr(expr); } } - Expr::Index { base, index } => { + Expr::Index { base, index, is_assignee_expr: _ } => { self.select_from_expr(*base); self.consume_expr(*index); } @@ -774,7 +778,7 @@ impl InferenceContext<'_> { fn minimize_captures(&mut self) { self.current_captures.sort_by_key(|it| it.place.projections.len()); - let mut hash_map = HashMap::::new(); + let mut hash_map = FxHashMap::::default(); let result = mem::take(&mut self.current_captures); for item in result { let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] }; @@ -825,7 +829,10 @@ impl InferenceContext<'_> { let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev())); for (arg, i) in it { let mut p = place.clone(); - p.projections.push(ProjectionElem::TupleOrClosureField(i)); + p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId { + tuple: TupleId(!0), // dummy this, as its unused anyways + index: i as u32, + }))); self.consume_with_pat(p, *arg); } } @@ -850,10 +857,10 @@ impl InferenceContext<'_> { continue; }; let mut p = place.clone(); - p.projections.push(ProjectionElem::Field(FieldId { + p.projections.push(ProjectionElem::Field(Either::Left(FieldId { parent: variant.into(), local_id, - })); + }))); self.consume_with_pat(p, arg); } } @@ -894,10 +901,10 @@ impl InferenceContext<'_> { al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev())); for (arg, (i, _)) in it { let mut p = place.clone(); - p.projections.push(ProjectionElem::Field(FieldId { + p.projections.push(ProjectionElem::Field(Either::Left(FieldId { parent: variant.into(), local_id: i, - })); + }))); self.consume_with_pat(p, *arg); } } diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 84954ca7e9..db631c8517 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -6,6 +6,7 @@ use std::{ }; use chalk_ir::{cast::Cast, fold::Shift, DebruijnIndex, Mutability, TyVariableKind}; +use either::Either; use hir_def::{ generics::TypeOrConstParamData, hir::{ @@ -13,7 +14,7 @@ use hir_def::{ }, lang_item::{LangItem, LangItemTarget}, path::{GenericArg, GenericArgs}, - BlockId, ConstParamId, FieldId, ItemContainerId, Lookup, + BlockId, ConstParamId, FieldId, ItemContainerId, Lookup, TupleFieldId, TupleId, }; use hir_expand::name::{name, Name}; use stdx::always; @@ -744,7 +745,7 @@ impl InferenceContext<'_> { (RangeOp::Inclusive, _, None) => self.err_ty(), } } - Expr::Index { base, index } => { + Expr::Index { base, index, is_assignee_expr } => { let base_ty = self.infer_expr_inner(*base, &Expectation::none()); let index_ty = self.infer_expr(*index, &Expectation::none()); @@ -772,11 +773,24 @@ impl InferenceContext<'_> { .build(); 
self.write_method_resolution(tgt_expr, func, substs); } - self.resolve_associated_type_with_params( - self_ty, - self.resolve_ops_index_output(), - &[index_ty.cast(Interner)], - ) + let assoc = self.resolve_ops_index_output(); + let res = self.resolve_associated_type_with_params( + self_ty.clone(), + assoc, + &[index_ty.clone().cast(Interner)], + ); + + if *is_assignee_expr { + if let Some(index_trait) = self.resolve_lang_trait(LangItem::IndexMut) { + let trait_ref = TyBuilder::trait_ref(self.db, index_trait) + .push(self_ty) + .fill(|_| index_ty.clone().cast(Interner)) + .build(); + self.push_obligation(trait_ref.cast(Interner)); + } + } + + res } else { self.err_ty() } @@ -964,7 +978,7 @@ impl InferenceContext<'_> { .push(callee_ty.clone()) .push(TyBuilder::tuple_with(params.iter().cloned())) .build(); - self.write_method_resolution(tgt_expr, func, subst.clone()); + self.write_method_resolution(tgt_expr, func, subst); } } @@ -1393,7 +1407,7 @@ impl InferenceContext<'_> { &mut self, receiver_ty: &Ty, name: &Name, - ) -> Option<(Ty, Option, Vec, bool)> { + ) -> Option<(Ty, Either, Vec, bool)> { let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone(), false); let mut private_field = None; let res = autoderef.by_ref().find_map(|(derefed_ty, _)| { @@ -1405,7 +1419,20 @@ impl InferenceContext<'_> { .get(idx) .map(|a| a.assert_ty_ref(Interner)) .cloned() - .map(|ty| (None, ty)) + .map(|ty| { + ( + Either::Right(TupleFieldId { + tuple: TupleId( + self.tuple_field_accesses_rev + .insert_full(substs.clone()) + .0 + as u32, + ), + index: idx as u32, + }), + ty, + ) + }) }); } TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => { @@ -1431,7 +1458,7 @@ impl InferenceContext<'_> { let ty = self.db.field_types(field_id.parent)[field_id.local_id] .clone() .substitute(Interner, ¶meters); - Some((Some(field_id), ty)) + Some((Either::Left(field_id), ty)) }); Some(match res { @@ -1451,7 +1478,7 @@ impl InferenceContext<'_> { let ty = self.insert_type_vars(ty); let ty = self.normalize_associated_types_in(ty); - (ty, Some(field_id), adjustments, false) + (ty, Either::Left(field_id), adjustments, false) } }) } @@ -1474,11 +1501,9 @@ impl InferenceContext<'_> { match self.lookup_field(&receiver_ty, name) { Some((ty, field_id, adjustments, is_public)) => { self.write_expr_adj(receiver, adjustments); - if let Some(field_id) = field_id { - self.result.field_resolutions.insert(tgt_expr, field_id); - } + self.result.field_resolutions.insert(tgt_expr, field_id); if !is_public { - if let Some(field) = field_id { + if let Either::Left(field) = field_id { // FIXME: Merge this diagnostic into UnresolvedField? 
self.result .diagnostics @@ -1568,9 +1593,7 @@ impl InferenceContext<'_> { { Some((ty, field_id, adjustments, _public)) => { self.write_expr_adj(receiver, adjustments); - if let Some(field_id) = field_id { - self.result.field_resolutions.insert(tgt_expr, field_id); - } + self.result.field_resolutions.insert(tgt_expr, field_id); Some(ty) } None => None, diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs index b8a1af96fb..663ea85323 100644 --- a/crates/hir-ty/src/infer/mutability.rs +++ b/crates/hir-ty/src/infer/mutability.rs @@ -96,7 +96,7 @@ impl InferenceContext<'_> { Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => { self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) } - &Expr::Index { base, index } => { + &Expr::Index { base, index, is_assignee_expr: _ } => { if mutability == Mutability::Mut { if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { if let Some(index_trait) = self diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs index acdb540289..1bf8babe83 100644 --- a/crates/hir-ty/src/infer/pat.rs +++ b/crates/hir-ty/src/infer/pat.rs @@ -233,7 +233,6 @@ impl InferenceContext<'_> { }; let mut expectations_iter = expectations .iter() - .cloned() .map(|a| a.assert_ty_ref(Interner).clone()) .chain(repeat_with(|| self.table.new_type_var())); @@ -336,7 +335,7 @@ impl InferenceContext<'_> { &Pat::Lit(expr) => { // Don't emit type mismatches again, the expression lowering already did that. let ty = self.infer_lit_pat(expr, &expected); - self.write_pat_ty(pat, ty.clone()); + self.write_pat_ty(pat, ty); return self.pat_ty_after_adjustment(pat); } Pat::Box { inner } => match self.resolve_boxed_box() { diff --git a/crates/hir-ty/src/interner.rs b/crates/hir-ty/src/interner.rs index e4dd4b86cf..eb6296f7a0 100644 --- a/crates/hir-ty/src/interner.rs +++ b/crates/hir-ty/src/interner.rs @@ -1,9 +1,15 @@ //! Implementation of the Chalk `Interner` trait, which allows customizing the //! representation of the various objects Chalk deals with (types, goals etc.). 
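// Illustrative sketch, not part of the patch: a simplified stand-in for the
// `InternedWrapper` newtype that the interner hunk below keeps using. It is a
// thin wrapper with `Deref`, so an interned `InternedWrapper<TyData>` can be used
// like the `TyData` itself; the actual `Interned` handle comes from
// rust-analyzer's `intern` crate and is not reproduced here.
use std::ops::Deref;

#[derive(Debug, PartialEq, Eq, Hash)]
struct InternedWrapper<T>(T);

impl<T> Deref for InternedWrapper<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

fn main() {
    let wrapped = InternedWrapper(vec![1u32, 2, 3]);
    // `Deref` lets callers treat the wrapper as the wrapped value.
    assert_eq!(wrapped.len(), 3);
}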
-use crate::{chalk_db, tls, ConstScalar, GenericArg}; +use crate::{ + chalk_db, tls, AliasTy, CanonicalVarKind, CanonicalVarKinds, ClosureId, Const, ConstData, + ConstScalar, Constraint, Constraints, FnDefId, GenericArg, GenericArgData, Goal, GoalData, + Goals, InEnvironment, Lifetime, LifetimeData, OpaqueTy, OpaqueTyId, ProgramClause, + ProgramClauseData, ProgramClauses, ProjectionTy, QuantifiedWhereClause, QuantifiedWhereClauses, + Substitution, Ty, TyData, TyKind, VariableKind, VariableKinds, +}; use base_db::salsa::InternId; -use chalk_ir::{Goal, GoalData}; +use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variance}; use hir_def::TypeAliasId; use intern::{impl_internable, Interned}; use smallvec::SmallVec; @@ -31,36 +37,37 @@ impl std::ops::Deref for InternedWrapper { } impl_internable!( - InternedWrapper>>, + InternedWrapper>, InternedWrapper>, - InternedWrapper>, - InternedWrapper>, - InternedWrapper>, + InternedWrapper, + InternedWrapper, + InternedWrapper, InternedWrapper, - InternedWrapper>>, - InternedWrapper>>, - InternedWrapper>>, - InternedWrapper>, + InternedWrapper>, + InternedWrapper>, + InternedWrapper>, + InternedWrapper>, ); impl chalk_ir::interner::Interner for Interner { - type InternedType = Interned>>; - type InternedLifetime = Interned>>; - type InternedConst = Interned>>; + type InternedType = Interned>; + type InternedLifetime = Interned>; + type InternedConst = Interned>; type InternedConcreteConst = ConstScalar; - type InternedGenericArg = chalk_ir::GenericArgData; - type InternedGoal = Arc>; - type InternedGoals = Vec>; + type InternedGenericArg = GenericArgData; + // We could do the following, but that saves "only" 20mb on self while increasing inferecene + // time by ~2.5% + // type InternedGoal = Interned>; + type InternedGoal = Arc; + type InternedGoals = Vec; type InternedSubstitution = Interned>>; - type InternedProgramClauses = Interned>>>; - type InternedProgramClause = chalk_ir::ProgramClauseData; - type InternedQuantifiedWhereClauses = - Interned>>>; - type InternedVariableKinds = Interned>>>; - type InternedCanonicalVarKinds = - Interned>>>; - type InternedConstraints = Vec>>; - type InternedVariances = Interned>>; + type InternedProgramClauses = Interned>>; + type InternedProgramClause = ProgramClauseData; + type InternedQuantifiedWhereClauses = Interned>>; + type InternedVariableKinds = Interned>>; + type InternedCanonicalVarKinds = Interned>>; + type InternedConstraints = Vec>; + type InternedVariances = SmallVec<[Variance; 16]>; type DefId = InternId; type InternedAdtId = hir_def::AdtId; type Identifier = TypeAliasId; @@ -88,68 +95,51 @@ impl chalk_ir::interner::Interner for Interner { } fn debug_opaque_ty_id( - opaque_ty_id: chalk_ir::OpaqueTyId, + opaque_ty_id: OpaqueTyId, fmt: &mut fmt::Formatter<'_>, ) -> Option { Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0)) } - fn debug_fn_def_id( - fn_def_id: chalk_ir::FnDefId, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { + fn debug_fn_def_id(fn_def_id: FnDefId, fmt: &mut fmt::Formatter<'_>) -> Option { tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt))) } fn debug_closure_id( - _fn_def_id: chalk_ir::ClosureId, + _fn_def_id: ClosureId, _fmt: &mut fmt::Formatter<'_>, ) -> Option { None } - fn debug_alias( - alias: &chalk_ir::AliasTy, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { + fn debug_alias(alias: &AliasTy, fmt: &mut fmt::Formatter<'_>) -> Option { use std::fmt::Debug; match alias { - chalk_ir::AliasTy::Projection(projection_ty) => { - 
Interner::debug_projection_ty(projection_ty, fmt) - } - chalk_ir::AliasTy::Opaque(opaque_ty) => Some(opaque_ty.fmt(fmt)), + AliasTy::Projection(projection_ty) => Interner::debug_projection_ty(projection_ty, fmt), + AliasTy::Opaque(opaque_ty) => Some(opaque_ty.fmt(fmt)), } } fn debug_projection_ty( - proj: &chalk_ir::ProjectionTy, + proj: &ProjectionTy, fmt: &mut fmt::Formatter<'_>, ) -> Option { tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt))) } - fn debug_opaque_ty( - opaque_ty: &chalk_ir::OpaqueTy, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { + fn debug_opaque_ty(opaque_ty: &OpaqueTy, fmt: &mut fmt::Formatter<'_>) -> Option { Some(write!(fmt, "{:?}", opaque_ty.opaque_ty_id)) } - fn debug_ty(ty: &chalk_ir::Ty, fmt: &mut fmt::Formatter<'_>) -> Option { + fn debug_ty(ty: &Ty, fmt: &mut fmt::Formatter<'_>) -> Option { Some(write!(fmt, "{:?}", ty.data(Interner))) } - fn debug_lifetime( - lifetime: &chalk_ir::Lifetime, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { + fn debug_lifetime(lifetime: &Lifetime, fmt: &mut fmt::Formatter<'_>) -> Option { Some(write!(fmt, "{:?}", lifetime.data(Interner))) } - fn debug_const( - constant: &chalk_ir::Const, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { + fn debug_const(constant: &Const, fmt: &mut fmt::Formatter<'_>) -> Option { Some(write!(fmt, "{:?}", constant.data(Interner))) } @@ -161,102 +151,99 @@ impl chalk_ir::interner::Interner for Interner { } fn debug_variable_kinds( - variable_kinds: &chalk_ir::VariableKinds, + variable_kinds: &VariableKinds, fmt: &mut fmt::Formatter<'_>, ) -> Option { Some(write!(fmt, "{:?}", variable_kinds.as_slice(Interner))) } fn debug_variable_kinds_with_angles( - variable_kinds: &chalk_ir::VariableKinds, + variable_kinds: &VariableKinds, fmt: &mut fmt::Formatter<'_>, ) -> Option { Some(write!(fmt, "{:?}", variable_kinds.inner_debug(Interner))) } fn debug_canonical_var_kinds( - canonical_var_kinds: &chalk_ir::CanonicalVarKinds, + canonical_var_kinds: &CanonicalVarKinds, fmt: &mut fmt::Formatter<'_>, ) -> Option { Some(write!(fmt, "{:?}", canonical_var_kinds.as_slice(Interner))) } - fn debug_goal(goal: &Goal, fmt: &mut fmt::Formatter<'_>) -> Option { + fn debug_goal(goal: &Goal, fmt: &mut fmt::Formatter<'_>) -> Option { let goal_data = goal.data(Interner); Some(write!(fmt, "{goal_data:?}")) } - fn debug_goals( - goals: &chalk_ir::Goals, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { + fn debug_goals(goals: &Goals, fmt: &mut fmt::Formatter<'_>) -> Option { Some(write!(fmt, "{:?}", goals.debug(Interner))) } fn debug_program_clause_implication( - pci: &chalk_ir::ProgramClauseImplication, + pci: &ProgramClauseImplication, fmt: &mut fmt::Formatter<'_>, ) -> Option { Some(write!(fmt, "{:?}", pci.debug(Interner))) } fn debug_program_clause( - clause: &chalk_ir::ProgramClause, + clause: &ProgramClause, fmt: &mut fmt::Formatter<'_>, ) -> Option { Some(write!(fmt, "{:?}", clause.data(Interner))) } fn debug_program_clauses( - clauses: &chalk_ir::ProgramClauses, + clauses: &ProgramClauses, fmt: &mut fmt::Formatter<'_>, ) -> Option { Some(write!(fmt, "{:?}", clauses.as_slice(Interner))) } fn debug_substitution( - substitution: &chalk_ir::Substitution, + substitution: &Substitution, fmt: &mut fmt::Formatter<'_>, ) -> Option { Some(write!(fmt, "{:?}", substitution.debug(Interner))) } fn debug_separator_trait_ref( - separator_trait_ref: &chalk_ir::SeparatorTraitRef<'_, Interner>, + separator_trait_ref: &SeparatorTraitRef<'_, Interner>, fmt: &mut fmt::Formatter<'_>, ) -> Option { Some(write!(fmt, "{:?}", 
separator_trait_ref.debug(Interner))) } fn debug_quantified_where_clauses( - clauses: &chalk_ir::QuantifiedWhereClauses, + clauses: &QuantifiedWhereClauses, fmt: &mut fmt::Formatter<'_>, ) -> Option { Some(write!(fmt, "{:?}", clauses.as_slice(Interner))) } fn debug_constraints( - _clauses: &chalk_ir::Constraints, + _clauses: &Constraints, _fmt: &mut fmt::Formatter<'_>, ) -> Option { None } - fn intern_ty(self, kind: chalk_ir::TyKind) -> Self::InternedType { + fn intern_ty(self, kind: TyKind) -> Self::InternedType { let flags = kind.compute_flags(self); - Interned::new(InternedWrapper(chalk_ir::TyData { kind, flags })) + Interned::new(InternedWrapper(TyData { kind, flags })) } - fn ty_data(self, ty: &Self::InternedType) -> &chalk_ir::TyData { + fn ty_data(self, ty: &Self::InternedType) -> &TyData { &ty.0 } - fn intern_lifetime(self, lifetime: chalk_ir::LifetimeData) -> Self::InternedLifetime { + fn intern_lifetime(self, lifetime: LifetimeData) -> Self::InternedLifetime { Interned::new(InternedWrapper(lifetime)) } - fn lifetime_data(self, lifetime: &Self::InternedLifetime) -> &chalk_ir::LifetimeData { + fn lifetime_data(self, lifetime: &Self::InternedLifetime) -> &LifetimeData { &lifetime.0 } - fn intern_const(self, constant: chalk_ir::ConstData) -> Self::InternedConst { + fn intern_const(self, constant: ConstData) -> Self::InternedConst { Interned::new(InternedWrapper(constant)) } - fn const_data(self, constant: &Self::InternedConst) -> &chalk_ir::ConstData { + fn const_data(self, constant: &Self::InternedConst) -> &ConstData { &constant.0 } @@ -269,36 +256,33 @@ impl chalk_ir::interner::Interner for Interner { !matches!(c1, ConstScalar::Bytes(..)) || !matches!(c2, ConstScalar::Bytes(..)) || (c1 == c2) } - fn intern_generic_arg( - self, - parameter: chalk_ir::GenericArgData, - ) -> Self::InternedGenericArg { + fn intern_generic_arg(self, parameter: GenericArgData) -> Self::InternedGenericArg { parameter } - fn generic_arg_data( - self, - parameter: &Self::InternedGenericArg, - ) -> &chalk_ir::GenericArgData { + fn generic_arg_data(self, parameter: &Self::InternedGenericArg) -> &GenericArgData { parameter } - fn intern_goal(self, goal: GoalData) -> Self::InternedGoal { + fn intern_goal(self, goal: GoalData) -> Self::InternedGoal { Arc::new(goal) } - fn goal_data(self, goal: &Self::InternedGoal) -> &GoalData { + fn goal_data(self, goal: &Self::InternedGoal) -> &GoalData { goal } fn intern_goals( self, - data: impl IntoIterator, E>>, + data: impl IntoIterator>, ) -> Result { + // let hash = + // std::hash::BuildHasher::hash_one(&BuildHasherDefault::::default(), &goal); + // Interned::new(InternedWrapper(PreHashedWrapper(goal, hash))) data.into_iter().collect() } - fn goals_data(self, goals: &Self::InternedGoals) -> &[Goal] { + fn goals_data(self, goals: &Self::InternedGoals) -> &[Goal] { goals } @@ -313,37 +297,28 @@ impl chalk_ir::interner::Interner for Interner { &substitution.as_ref().0 } - fn intern_program_clause( - self, - data: chalk_ir::ProgramClauseData, - ) -> Self::InternedProgramClause { + fn intern_program_clause(self, data: ProgramClauseData) -> Self::InternedProgramClause { data } - fn program_clause_data( - self, - clause: &Self::InternedProgramClause, - ) -> &chalk_ir::ProgramClauseData { + fn program_clause_data(self, clause: &Self::InternedProgramClause) -> &ProgramClauseData { clause } fn intern_program_clauses( self, - data: impl IntoIterator, E>>, + data: impl IntoIterator>, ) -> Result { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn 
program_clauses_data( - self, - clauses: &Self::InternedProgramClauses, - ) -> &[chalk_ir::ProgramClause] { + fn program_clauses_data(self, clauses: &Self::InternedProgramClauses) -> &[ProgramClause] { clauses } fn intern_quantified_where_clauses( self, - data: impl IntoIterator, E>>, + data: impl IntoIterator>, ) -> Result { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } @@ -351,27 +326,24 @@ impl chalk_ir::interner::Interner for Interner { fn quantified_where_clauses_data( self, clauses: &Self::InternedQuantifiedWhereClauses, - ) -> &[chalk_ir::QuantifiedWhereClause] { + ) -> &[QuantifiedWhereClause] { clauses } fn intern_generic_arg_kinds( self, - data: impl IntoIterator, E>>, + data: impl IntoIterator>, ) -> Result { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn variable_kinds_data( - self, - parameter_kinds: &Self::InternedVariableKinds, - ) -> &[chalk_ir::VariableKind] { + fn variable_kinds_data(self, parameter_kinds: &Self::InternedVariableKinds) -> &[VariableKind] { ¶meter_kinds.as_ref().0 } fn intern_canonical_var_kinds( self, - data: impl IntoIterator, E>>, + data: impl IntoIterator>, ) -> Result { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } @@ -379,30 +351,30 @@ impl chalk_ir::interner::Interner for Interner { fn canonical_var_kinds_data( self, canonical_var_kinds: &Self::InternedCanonicalVarKinds, - ) -> &[chalk_ir::CanonicalVarKind] { + ) -> &[CanonicalVarKind] { canonical_var_kinds } fn intern_constraints( self, - data: impl IntoIterator>, E>>, + data: impl IntoIterator, E>>, ) -> Result { data.into_iter().collect() } fn constraints_data( self, constraints: &Self::InternedConstraints, - ) -> &[chalk_ir::InEnvironment>] { + ) -> &[InEnvironment] { constraints } fn intern_variances( self, - data: impl IntoIterator>, + data: impl IntoIterator>, ) -> Result { - Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) + data.into_iter().collect::>() } - fn variances_data(self, variances: &Self::InternedVariances) -> &[chalk_ir::Variance] { + fn variances_data(self, variances: &Self::InternedVariances) -> &[Variance] { variances } } diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index bfc4f1383e..68619bb8b1 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -164,7 +164,7 @@ fn layout_of_simd_ty( }; // Compute the ABI of the element type: - let e_ly = db.layout_of_ty(e_ty, env.clone())?; + let e_ly = db.layout_of_ty(e_ty, env)?; let Abi::Scalar(e_abi) = e_ly.abi else { return Err(LayoutError::Unknown); }; @@ -204,17 +204,17 @@ pub fn layout_of_ty_query( }; let cx = LayoutCx { target: &target }; let dl = &*cx.current_data_layout(); - let ty = normalize(db, trait_env.clone(), ty.clone()); + let ty = normalize(db, trait_env.clone(), ty); let result = match ty.kind(Interner) { TyKind::Adt(AdtId(def), subst) => { if let hir_def::AdtId::StructId(s) = def { let data = db.struct_data(*s); let repr = data.repr.unwrap_or_default(); if repr.simd() { - return layout_of_simd_ty(db, *s, subst, trait_env.clone(), &target); + return layout_of_simd_ty(db, *s, subst, trait_env, &target); } }; - return db.layout_of_adt(*def, subst.clone(), trait_env.clone()); + return db.layout_of_adt(*def, subst.clone(), trait_env); } TyKind::Scalar(s) => match s { chalk_ir::Scalar::Bool => Layout::scalar( @@ -280,7 +280,7 @@ pub fn layout_of_ty_query( } TyKind::Array(element, count) => { let count = try_const_usize(db, &count).ok_or(LayoutError::HasErrorConst)? 
as u64; - let element = db.layout_of_ty(element.clone(), trait_env.clone())?; + let element = db.layout_of_ty(element.clone(), trait_env)?; let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?; let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) { @@ -303,7 +303,7 @@ pub fn layout_of_ty_query( } } TyKind::Slice(element) => { - let element = db.layout_of_ty(element.clone(), trait_env.clone())?; + let element = db.layout_of_ty(element.clone(), trait_env)?; Layout { variants: Variants::Single { index: struct_variant_idx() }, fields: FieldsShape::Array { stride: element.size, count: 0 }, @@ -345,7 +345,7 @@ pub fn layout_of_ty_query( })) .intern(Interner); } - unsized_part = normalize(db, trait_env.clone(), unsized_part); + unsized_part = normalize(db, trait_env, unsized_part); let metadata = match unsized_part.kind(Interner) { TyKind::Slice(_) | TyKind::Str => { scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false)) @@ -384,7 +384,7 @@ pub fn layout_of_ty_query( match impl_trait_id { crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { let infer = db.infer(func.into()); - return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env.clone()); + return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env); } crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { return Err(LayoutError::NotImplemented) diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs index 9937113685..ef0be7ab2d 100644 --- a/crates/hir-ty/src/layout/tests.rs +++ b/crates/hir-ty/src/layout/tests.rs @@ -220,6 +220,36 @@ fn recursive() { ); } +#[test] +fn repr_packed() { + size_and_align! { + #[repr(packed)] + struct Goal; + } + size_and_align! { + #[repr(packed(2))] + struct Goal; + } + size_and_align! { + #[repr(packed(4))] + struct Goal; + } + size_and_align! { + #[repr(packed)] + struct Goal(i32); + } + size_and_align! { + #[repr(packed(2))] + struct Goal(i32); + } + size_and_align! { + #[repr(packed(4))] + struct Goal(i32); + } + + check_size_and_align("#[repr(packed(5))] struct Goal(i32);", "", 4, 1); +} + #[test] fn generic() { size_and_align! 
{ diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index cf174feed2..793b52b49f 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -37,22 +37,22 @@ mod tests; mod test_db; use std::{ - collections::{hash_map::Entry, HashMap}, - hash::Hash, + collections::hash_map::Entry, + hash::{BuildHasherDefault, Hash}, }; use chalk_ir::{ fold::{Shift, TypeFoldable}, interner::HasInterner, visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, - NoSolution, TyData, + NoSolution, }; use either::Either; use hir_def::{hir::ExprId, type_ref::Rawness, GeneralConstId, TypeOrConstParamId}; use hir_expand::name; use la_arena::{Arena, Idx}; use mir::{MirEvalError, VTableMap}; -use rustc_hash::FxHashSet; +use rustc_hash::{FxHashMap, FxHashSet}; use syntax::ast::{make, ConstArg}; use traits::FnTrait; use triomphe::Arc; @@ -152,32 +152,64 @@ pub type DomainGoal = chalk_ir::DomainGoal; pub type Goal = chalk_ir::Goal; pub type AliasEq = chalk_ir::AliasEq; pub type Solution = chalk_solve::Solution; +pub type Constraint = chalk_ir::Constraint; +pub type Constraints = chalk_ir::Constraints; pub type ConstrainedSubst = chalk_ir::ConstrainedSubst; pub type Guidance = chalk_solve::Guidance; pub type WhereClause = chalk_ir::WhereClause; +pub type CanonicalVarKind = chalk_ir::CanonicalVarKind; +pub type GoalData = chalk_ir::GoalData; +pub type Goals = chalk_ir::Goals; +pub type ProgramClauseData = chalk_ir::ProgramClauseData; +pub type ProgramClause = chalk_ir::ProgramClause; +pub type ProgramClauses = chalk_ir::ProgramClauses; +pub type TyData = chalk_ir::TyData; +pub type Variances = chalk_ir::Variances; + /// A constant can have reference to other things. Memory map job is holding /// the necessary bits of memory of the const eval session to keep the constant /// meaningful. #[derive(Debug, Default, Clone, PartialEq, Eq)] -pub struct MemoryMap { - pub memory: HashMap>, - pub vtable: VTableMap, +pub enum MemoryMap { + #[default] + Empty, + Simple(Box<[u8]>), + Complex(Box), } -impl MemoryMap { - fn insert(&mut self, addr: usize, x: Vec) { +#[derive(Debug, Default, Clone, PartialEq, Eq)] +pub struct ComplexMemoryMap { + memory: FxHashMap>, + vtable: VTableMap, +} + +impl ComplexMemoryMap { + fn insert(&mut self, addr: usize, val: Box<[u8]>) { match self.memory.entry(addr) { Entry::Occupied(mut e) => { - if e.get().len() < x.len() { - e.insert(x); + if e.get().len() < val.len() { + e.insert(val); } } Entry::Vacant(e) => { - e.insert(x); + e.insert(val); } } } +} + +impl MemoryMap { + pub fn vtable_ty(&self, id: usize) -> Result<&Ty, MirEvalError> { + match self { + MemoryMap::Empty | MemoryMap::Simple(_) => Err(MirEvalError::InvalidVTableId(id)), + MemoryMap::Complex(cm) => cm.vtable.ty(id), + } + } + + fn simple(v: Box<[u8]>) -> Self { + MemoryMap::Simple(v) + } /// This functions convert each address by a function `f` which gets the byte intervals and assign an address /// to them. It is useful when you want to load a constant with a memory map in a new memory. 
You can pass an @@ -185,22 +217,33 @@ impl MemoryMap { fn transform_addresses( &self, mut f: impl FnMut(&[u8], usize) -> Result, - ) -> Result, MirEvalError> { - self.memory - .iter() - .map(|x| { - let addr = *x.0; - let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) }; - Ok((addr, f(x.1, align)?)) - }) - .collect() + ) -> Result, MirEvalError> { + let mut transform = |(addr, val): (&usize, &Box<[u8]>)| { + let addr = *addr; + let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) }; + f(val, align).and_then(|it| Ok((addr, it))) + }; + match self { + MemoryMap::Empty => Ok(Default::default()), + MemoryMap::Simple(m) => transform((&0, m)).map(|(addr, val)| { + let mut map = FxHashMap::with_capacity_and_hasher(1, BuildHasherDefault::default()); + map.insert(addr, val); + map + }), + MemoryMap::Complex(cm) => cm.memory.iter().map(transform).collect(), + } } - fn get<'a>(&'a self, addr: usize, size: usize) -> Option<&'a [u8]> { + fn get(&self, addr: usize, size: usize) -> Option<&[u8]> { if size == 0 { Some(&[]) } else { - self.memory.get(&addr)?.get(0..size) + match self { + MemoryMap::Empty => Some(&[]), + MemoryMap::Simple(m) if addr == 0 => m.get(0..size), + MemoryMap::Simple(_) => None, + MemoryMap::Complex(cm) => cm.memory.get(&addr)?.get(0..size), + } } } } @@ -208,7 +251,7 @@ impl MemoryMap { /// A concrete constant value #[derive(Debug, Clone, PartialEq, Eq)] pub enum ConstScalar { - Bytes(Vec, MemoryMap), + Bytes(Box<[u8]>, MemoryMap), // FIXME: this is a hack to get around chalk not being able to represent unevaluatable // constants UnevaluatedConst(GeneralConstId, Substitution), diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 041d61c1b1..33619edfee 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -1350,7 +1350,7 @@ pub(crate) fn resolve_indexing_op( ty: Canonical, index_trait: TraitId, ) -> Option { - let mut table = InferenceTable::new(db, env.clone()); + let mut table = InferenceTable::new(db, env); let ty = table.instantiate_canonical(ty); let deref_chain = autoderef_method_receiver(&mut table, ty); for (ty, adj) in deref_chain { diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs index f1795e71d9..7bef6f0d0f 100644 --- a/crates/hir-ty/src/mir.rs +++ b/crates/hir-ty/src/mir.rs @@ -14,9 +14,10 @@ use crate::{ }; use base_db::CrateId; use chalk_ir::Mutability; +use either::Either; use hir_def::{ hir::{BindingId, Expr, ExprId, Ordering, PatId}, - DefWithBodyId, FieldId, StaticId, UnionId, VariantId, + DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId, }; use la_arena::{Arena, ArenaMap, Idx, RawIdx}; @@ -97,16 +98,16 @@ pub enum Operand { } impl Operand { - fn from_concrete_const(data: Vec, memory_map: MemoryMap, ty: Ty) -> Self { + fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap, ty: Ty) -> Self { Operand::Constant(intern_const_scalar(ConstScalar::Bytes(data, memory_map), ty)) } - fn from_bytes(data: Vec, ty: Ty) -> Self { + fn from_bytes(data: Box<[u8]>, ty: Ty) -> Self { Operand::from_concrete_const(data, MemoryMap::default(), ty) } fn const_zst(ty: Ty) -> Operand { - Self::from_bytes(vec![], ty) + Self::from_bytes(Box::default(), ty) } fn from_fn( @@ -117,16 +118,16 @@ impl Operand { let ty = chalk_ir::TyKind::FnDef(CallableDefId::FunctionId(func_id).to_chalk(db), generic_args) .intern(Interner); - Operand::from_bytes(vec![], ty) + Operand::from_bytes(Box::default(), ty) } } 
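// A minimal, self-contained sketch (not part of the patch) of the lookup rule
// behind the `MemoryMap` rework above: `Empty` and `Simple` cover the common
// zero- and single-allocation constants, and only `Complex` pays for a full
// address table. `MiniMemoryMap` and the plain `HashMap` are illustrative
// stand-ins for the real type, which additionally carries a vtable map.
use std::collections::HashMap;

enum MiniMemoryMap {
    Empty,
    Simple(Box<[u8]>),
    Complex(HashMap<usize, Box<[u8]>>),
}

impl MiniMemoryMap {
    // Mirrors the shape of `MemoryMap::get`: `Simple` only answers for address
    // zero, while `Complex` looks the address up in its per-allocation table.
    fn get(&self, addr: usize, size: usize) -> Option<&[u8]> {
        if size == 0 {
            return Some(&[]);
        }
        match self {
            MiniMemoryMap::Empty => Some(&[]),
            MiniMemoryMap::Simple(bytes) if addr == 0 => bytes.get(0..size),
            MiniMemoryMap::Simple(_) => None,
            MiniMemoryMap::Complex(map) => map.get(&addr)?.get(0..size),
        }
    }
}

fn main() {
    let simple = MiniMemoryMap::Simple(b"hello".to_vec().into_boxed_slice());
    assert_eq!(simple.get(0, 5), Some(&b"hello"[..]));
    assert_eq!(simple.get(1, 4), None); // non-zero addresses need `Complex`

    let mut table = HashMap::new();
    table.insert(0x1000, b"world".to_vec().into_boxed_slice());
    let complex = MiniMemoryMap::Complex(table);
    assert_eq!(complex.get(0x1000, 5), Some(&b"world"[..]));
}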
#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum ProjectionElem { Deref, - Field(FieldId), + Field(Either), // FIXME: get rid of this, and use FieldId for tuples and closures - TupleOrClosureField(usize), + ClosureField(usize), Index(V), ConstantIndex { offset: u64, from_end: bool }, Subslice { from: u64, to: u64 }, @@ -161,7 +162,7 @@ impl ProjectionElem { return TyKind::Error.intern(Interner); } }, - ProjectionElem::Field(f) => match &base.kind(Interner) { + ProjectionElem::Field(Either::Left(f)) => match &base.kind(Interner) { TyKind::Adt(_, subst) => { db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst) } @@ -170,19 +171,25 @@ impl ProjectionElem { return TyKind::Error.intern(Interner); } }, - ProjectionElem::TupleOrClosureField(f) => match &base.kind(Interner) { + ProjectionElem::Field(Either::Right(f)) => match &base.kind(Interner) { TyKind::Tuple(_, subst) => subst .as_slice(Interner) - .get(*f) + .get(f.index as usize) .map(|x| x.assert_ty_ref(Interner)) .cloned() .unwrap_or_else(|| { never!("Out of bound tuple field"); TyKind::Error.intern(Interner) }), + _ => { + never!("Only tuple has tuple field"); + return TyKind::Error.intern(Interner); + } + }, + ProjectionElem::ClosureField(f) => match &base.kind(Interner) { TyKind::Closure(id, subst) => closure_field(*id, subst, *f), _ => { - never!("Only tuple or closure has tuple or closure field"); + never!("Only closure has closure field"); return TyKind::Error.intern(Interner); } }, diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs index 74c5efd6c3..e79c87a02f 100644 --- a/crates/hir-ty/src/mir/borrowck.rs +++ b/crates/hir-ty/src/mir/borrowck.rs @@ -205,7 +205,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio | ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. } | ProjectionElem::Field(_) - | ProjectionElem::TupleOrClosureField(_) + | ProjectionElem::ClosureField(_) | ProjectionElem::Index(_) => { is_part_of = true; } diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index fbfb6ff8cd..16075d9073 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -1,13 +1,6 @@ //! This module provides a MIR interpreter, which is used in const eval. 
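// A small, self-contained sketch (not part of the patch) of the
// `ProjectionElem` split above: ADT fields and tuple fields now share the
// `Field` variant through an either-like payload, while closure captures keep
// a dedicated `ClosureField` variant. `FieldKind` and `Projection` are
// simplified stand-ins for the real types.
#[derive(Debug)]
enum FieldKind {
    Named(&'static str), // stand-in for `FieldId` on an ADT
    Tuple(u32),          // stand-in for `TupleFieldId`
}

#[derive(Debug)]
enum Projection {
    Deref,
    Field(FieldKind),
    ClosureField(usize),
}

fn render(projection: &Projection) -> String {
    match projection {
        Projection::Deref => "*".to_owned(),
        Projection::Field(FieldKind::Named(name)) => format!(".{name}"),
        Projection::Field(FieldKind::Tuple(index)) => format!(".{index}"),
        Projection::ClosureField(index) => format!(".{index} (capture)"),
    }
}

fn main() {
    let place = [
        Projection::Deref,
        Projection::Field(FieldKind::Tuple(0)),
        Projection::ClosureField(1),
    ];
    for projection in &place {
        println!("{}", render(projection));
    }
}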
-use std::{ - borrow::Cow, - cell::RefCell, - collections::{HashMap, HashSet}, - fmt::Write, - iter, mem, - ops::Range, -}; +use std::{borrow::Cow, cell::RefCell, fmt::Write, iter, mem, ops::Range}; use base_db::{CrateId, FileId}; use chalk_ir::{cast::Cast, Mutability}; @@ -40,8 +33,8 @@ use crate::{ name, static_lifetime, traits::FnTrait, utils::{detect_variant_from_bytes, ClosureSubst}, - CallableDefId, ClosureId, Const, ConstScalar, FnDefId, Interner, MemoryMap, Substitution, - TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, + CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstScalar, FnDefId, Interner, MemoryMap, + Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, }; use super::{ @@ -98,6 +91,15 @@ impl VTableMap { let id = from_bytes!(usize, bytes); self.ty(id) } + + pub fn shrink_to_fit(&mut self) { + self.id_to_ty.shrink_to_fit(); + self.ty_to_id.shrink_to_fit(); + } + + fn is_empty(&self) -> bool { + self.id_to_ty.is_empty() && self.ty_to_id.is_empty() + } } #[derive(Debug, Default, Clone, PartialEq, Eq)] @@ -251,13 +253,6 @@ impl From for IntervalOrOwned { } impl IntervalOrOwned { - pub(crate) fn to_vec(self, memory: &Evaluator<'_>) -> Result> { - Ok(match self { - IntervalOrOwned::Owned(o) => o, - IntervalOrOwned::Borrowed(b) => b.get(memory)?.to_vec(), - }) - } - fn get<'a>(&'a self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> { Ok(match self { IntervalOrOwned::Owned(o) => o, @@ -291,8 +286,8 @@ impl Address { } } - fn to_bytes(&self) -> Vec { - usize::to_le_bytes(self.to_usize()).to_vec() + fn to_bytes(&self) -> [u8; mem::size_of::()] { + usize::to_le_bytes(self.to_usize()) } fn to_usize(&self) -> usize { @@ -391,7 +386,7 @@ impl MirEvalError { write!( f, "Layout for type `{}` is not available due {err:?}", - ty.display(db).with_closure_style(ClosureStyle::ClosureWithId).to_string() + ty.display(db).with_closure_style(ClosureStyle::ClosureWithId) )?; } MirEvalError::MirLowerError(func, err) => { @@ -510,6 +505,20 @@ struct Locals { drop_flags: DropFlags, } +pub struct MirOutput { + stdout: Vec, + stderr: Vec, +} + +impl MirOutput { + pub fn stdout(&self) -> Cow<'_, str> { + String::from_utf8_lossy(&self.stdout) + } + pub fn stderr(&self) -> Cow<'_, str> { + String::from_utf8_lossy(&self.stderr) + } +} + pub fn interpret_mir( db: &dyn HirDatabase, body: Arc, @@ -520,27 +529,31 @@ pub fn interpret_mir( // (and probably should) do better here, for example by excluding bindings outside of the target expression. 
assert_placeholder_ty_is_unused: bool, trait_env: Option>, -) -> (Result, String, String) { +) -> (Result, MirOutput) { let ty = body.locals[return_slot()].ty.clone(); let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env); let it: Result = (|| { if evaluator.ptr_size() != std::mem::size_of::() { not_supported!("targets with different pointer size from host"); } - let bytes = evaluator.interpret_mir(body.clone(), None.into_iter())?; + let interval = evaluator.interpret_mir(body.clone(), None.into_iter())?; + let bytes = interval.get(&evaluator)?; let mut memory_map = evaluator.create_memory_map( - &bytes, + bytes, &ty, &Locals { ptr: ArenaMap::new(), body, drop_flags: DropFlags::default() }, )?; - memory_map.vtable = evaluator.vtable_map.clone(); + let bytes = bytes.into(); + let memory_map = if memory_map.memory.is_empty() && evaluator.vtable_map.is_empty() { + MemoryMap::Empty + } else { + memory_map.vtable = mem::take(&mut evaluator.vtable_map); + memory_map.vtable.shrink_to_fit(); + MemoryMap::Complex(Box::new(memory_map)) + }; return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty)); })(); - ( - it, - String::from_utf8_lossy(&evaluator.stdout).into_owned(), - String::from_utf8_lossy(&evaluator.stderr).into_owned(), - ) + (it, MirOutput { stdout: evaluator.stdout, stderr: evaluator.stderr }) } #[cfg(test)] @@ -562,7 +575,7 @@ impl Evaluator<'_> { code_stack: vec![], vtable_map: VTableMap::default(), thread_local_storage: TlsData::default(), - static_locations: HashMap::default(), + static_locations: Default::default(), db, random_state: oorandom::Rand64::new(0), trait_env: trait_env.unwrap_or_else(|| db.trait_environment_for_body(owner)), @@ -573,11 +586,11 @@ impl Evaluator<'_> { stack_depth_limit: 100, execution_limit: EXECUTION_LIMIT, memory_limit: 1000_000_000, // 2GB, 1GB for stack and 1GB for heap - layout_cache: RefCell::new(HashMap::default()), - projected_ty_cache: RefCell::new(HashMap::default()), - not_special_fn_cache: RefCell::new(HashSet::default()), - mir_or_dyn_index_cache: RefCell::new(HashMap::default()), - unused_locals_store: RefCell::new(HashMap::default()), + layout_cache: RefCell::new(Default::default()), + projected_ty_cache: RefCell::new(Default::default()), + not_special_fn_cache: RefCell::new(Default::default()), + mir_or_dyn_index_cache: RefCell::new(Default::default()), + unused_locals_store: RefCell::new(Default::default()), cached_ptr_size: match db.target_data_layout(crate_id) { Some(it) => it.pointer_size.bytes_usize(), None => 8, @@ -720,13 +733,19 @@ impl Evaluator<'_> { self.size_of_sized(&inner_ty, locals, "array inner type should be sized")?; addr = addr.offset(ty_size * (from as usize)); } - &ProjectionElem::TupleOrClosureField(f) => { + &ProjectionElem::ClosureField(f) => { let layout = self.layout(&prev_ty)?; let offset = layout.fields.offset(f).bytes_usize(); addr = addr.offset(offset); - metadata = None; // tuple field is always sized + metadata = None; } - ProjectionElem::Field(f) => { + ProjectionElem::Field(Either::Right(f)) => { + let layout = self.layout(&prev_ty)?; + let offset = layout.fields.offset(f.index as usize).bytes_usize(); + addr = addr.offset(offset); + metadata = None; // tuple field is always sized FIXME: This is wrong, the tail can be unsized + } + ProjectionElem::Field(Either::Left(f)) => { let layout = self.layout(&prev_ty)?; let variant_layout = match &layout.variants { Variants::Single { .. 
} => &layout, @@ -797,11 +816,11 @@ impl Evaluator<'_> { }) } - fn interpret_mir( - &mut self, + fn interpret_mir<'slf>( + &'slf mut self, body: Arc, args: impl Iterator, - ) -> Result> { + ) -> Result { if let Some(it) = self.stack_depth_limit.checked_sub(1) { self.stack_depth_limit = it; } else { @@ -831,8 +850,8 @@ impl Evaluator<'_> { match &statement.kind { StatementKind::Assign(l, r) => { let addr = self.place_addr(l, &locals)?; - let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?; - self.write_memory(addr, &result)?; + let result = self.eval_rvalue(r, &mut locals)?; + self.copy_from_interval_or_owned(addr, result)?; locals .drop_flags .add_place(l.clone(), &locals.body.projection_store); @@ -951,7 +970,7 @@ impl Evaluator<'_> { None => { self.code_stack = prev_code_stack; self.stack_depth_limit += 1; - return Ok(return_interval.get(self)?.to_vec()); + return Ok(return_interval); } Some(bb) => { // We don't support const promotion, so we can't truncate the stack yet. @@ -1044,7 +1063,7 @@ impl Evaluator<'_> { Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?), Rvalue::Ref(_, p) => { let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?; - let mut r = addr.to_bytes(); + let mut r = addr.to_bytes().to_vec(); if let Some(metadata) = metadata { r.extend(metadata.get(self)?); } @@ -1277,7 +1296,7 @@ impl Evaluator<'_> { not_supported!("unsized box initialization"); }; let addr = self.heap_allocate(size, align)?; - Owned(addr.to_bytes()) + Owned(addr.to_bytes().to_vec()) } Rvalue::CopyForDeref(_) => not_supported!("copy for deref"), Rvalue::Aggregate(kind, values) => { @@ -1514,7 +1533,7 @@ impl Evaluator<'_> { } }, TyKind::Dyn(_) => { - let vtable = self.vtable_map.id(current_ty.clone()); + let vtable = self.vtable_map.id(current_ty); let mut r = Vec::with_capacity(16); let addr = addr.get(self)?; r.extend(addr.iter().copied()); @@ -1709,7 +1728,18 @@ impl Evaluator<'_> { } let addr = self.heap_allocate(size, align)?; self.write_memory(addr, &v)?; - self.patch_addresses(&patch_map, &memory_map.vtable, addr, ty, locals)?; + self.patch_addresses( + &patch_map, + |bytes| match &memory_map { + MemoryMap::Empty | MemoryMap::Simple(_) => { + Err(MirEvalError::InvalidVTableId(from_bytes!(usize, bytes))) + } + MemoryMap::Complex(cm) => cm.vtable.ty_of_bytes(bytes), + }, + addr, + ty, + locals, + )?; Ok(Interval::new(addr, size)) } @@ -1761,6 +1791,13 @@ impl Evaluator<'_> { Ok(()) } + fn copy_from_interval_or_owned(&mut self, addr: Address, r: IntervalOrOwned) -> Result<()> { + match r { + IntervalOrOwned::Borrowed(r) => self.copy_from_interval(addr, r), + IntervalOrOwned::Owned(r) => self.write_memory(addr, &r), + } + } + fn copy_from_interval(&mut self, addr: Address, r: Interval) -> Result<()> { if r.size == 0 { return Ok(()); @@ -1881,13 +1918,18 @@ impl Evaluator<'_> { } } - fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals) -> Result { + fn create_memory_map( + &self, + bytes: &[u8], + ty: &Ty, + locals: &Locals, + ) -> Result { fn rec( this: &Evaluator<'_>, bytes: &[u8], ty: &Ty, locals: &Locals, - mm: &mut MemoryMap, + mm: &mut ComplexMemoryMap, ) -> Result<()> { match ty.kind(Interner) { TyKind::Ref(_, _, t) => { @@ -1897,7 +1939,7 @@ impl Evaluator<'_> { let addr_usize = from_bytes!(usize, bytes); mm.insert( addr_usize, - this.read_memory(Address::from_usize(addr_usize), size)?.to_vec(), + this.read_memory(Address::from_usize(addr_usize), size)?.into(), ) } None => { @@ -1923,7 +1965,7 @@ impl Evaluator<'_> { let size = 
element_size * count; let addr = Address::from_bytes(addr)?; let b = this.read_memory(addr, size)?; - mm.insert(addr.to_usize(), b.to_vec()); + mm.insert(addr.to_usize(), b.into()); if let Some(ty) = check_inner { for i in 0..count { let offset = element_size * i; @@ -1996,15 +2038,15 @@ impl Evaluator<'_> { } Ok(()) } - let mut mm = MemoryMap::default(); - rec(self, bytes, ty, locals, &mut mm)?; + let mut mm = ComplexMemoryMap::default(); + rec(&self, bytes, ty, locals, &mut mm)?; Ok(mm) } - fn patch_addresses( + fn patch_addresses<'vtable>( &mut self, - patch_map: &HashMap, - old_vtable: &VTableMap, + patch_map: &FxHashMap, + ty_of_bytes: impl Fn(&[u8]) -> Result<&'vtable Ty> + Copy, addr: Address, ty: &Ty, locals: &Locals, @@ -2031,7 +2073,7 @@ impl Evaluator<'_> { } } TyKind::Function(_) => { - let ty = old_vtable.ty_of_bytes(self.read_memory(addr, my_size)?)?.clone(); + let ty = ty_of_bytes(self.read_memory(addr, my_size)?)?.clone(); let new_id = self.vtable_map.id(ty); self.write_memory(addr, &new_id.to_le_bytes())?; } @@ -2042,7 +2084,7 @@ impl Evaluator<'_> { let ty = ty.clone().substitute(Interner, subst); self.patch_addresses( patch_map, - old_vtable, + ty_of_bytes, addr.offset(offset), &ty, locals, @@ -2064,7 +2106,7 @@ impl Evaluator<'_> { let ty = ty.clone().substitute(Interner, subst); self.patch_addresses( patch_map, - old_vtable, + ty_of_bytes, addr.offset(offset), &ty, locals, @@ -2077,7 +2119,7 @@ impl Evaluator<'_> { for (id, ty) in subst.iter(Interner).enumerate() { let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument let offset = layout.fields.offset(id).bytes_usize(); - self.patch_addresses(patch_map, old_vtable, addr.offset(offset), ty, locals)?; + self.patch_addresses(patch_map, ty_of_bytes, addr.offset(offset), ty, locals)?; } } TyKind::Array(inner, len) => { @@ -2089,7 +2131,7 @@ impl Evaluator<'_> { for i in 0..len { self.patch_addresses( patch_map, - old_vtable, + ty_of_bytes, addr.offset(i * size), inner, locals, @@ -2160,14 +2202,14 @@ impl Evaluator<'_> { .map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?; let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some() { - closure_data.addr.to_bytes() + closure_data.addr.to_bytes().to_vec() } else { closure_data.get(self)?.to_owned() }; let arg_bytes = iter::once(Ok(closure_data)) .chain(args.iter().map(|it| Ok(it.get(&self)?.to_owned()))) .collect::>>()?; - let bytes = self + let interval = self .interpret_mir(mir_body, arg_bytes.into_iter().map(IntervalOrOwned::Owned)) .map_err(|e| { MirEvalError::InFunction( @@ -2175,7 +2217,7 @@ impl Evaluator<'_> { vec![(Either::Right(closure), span, locals.body.owner)], ) })?; - destination.write_from_bytes(self, &bytes)?; + destination.write_from_interval(self, interval)?; Ok(None) } @@ -2368,7 +2410,7 @@ impl Evaluator<'_> { vec![(Either::Left(def), span, locals.body.owner)], ) })?; - destination.write_from_bytes(self, &result)?; + destination.write_from_interval(self, result)?; None }) } @@ -2546,7 +2588,7 @@ impl Evaluator<'_> { body, locals, drop_fn, - [IntervalOrOwned::Owned(addr.to_bytes())].into_iter(), + iter::once(IntervalOrOwned::Owned(addr.to_bytes().to_vec())), span, Interval { addr: Address::Invalid(0), size: 0 }, None, @@ -2674,11 +2716,12 @@ pub fn render_const_using_debug_impl( ) else { not_supported!("std::fmt::format not found"); }; - let message_string = evaluator.interpret_mir( + let interval = evaluator.interpret_mir( db.mir_body(format_fn.into()).map_err(|e| 
MirEvalError::MirLowerError(format_fn, e))?, [IntervalOrOwned::Borrowed(Interval { addr: a3, size: evaluator.ptr_size() * 6 })] .into_iter(), )?; + let message_string = interval.get(&evaluator)?; let addr = Address::from_bytes(&message_string[evaluator.ptr_size()..2 * evaluator.ptr_size()])?; let size = from_bytes!(usize, message_string[2 * evaluator.ptr_size()..]); diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs index 2de99e4165..ff26a3d0be 100644 --- a/crates/hir-ty/src/mir/eval/shim.rs +++ b/crates/hir-ty/src/mir/eval/shim.rs @@ -322,12 +322,13 @@ impl Evaluator<'_> { let hir_def::resolver::ValueNs::FunctionId(format_fn) = format_fn else { not_supported!("std::fmt::format is not a function") }; - let message_string = self.interpret_mir( + let interval = self.interpret_mir( self.db .mir_body(format_fn.into()) .map_err(|e| MirEvalError::MirLowerError(format_fn, e))?, args.map(|x| IntervalOrOwned::Owned(x.clone())), )?; + let message_string = interval.get(self)?; let addr = Address::from_bytes(&message_string[self.ptr_size()..2 * self.ptr_size()])?; let size = from_bytes!(usize, message_string[2 * self.ptr_size()..]); diff --git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs index b0f929279a..6552bf4933 100644 --- a/crates/hir-ty/src/mir/eval/tests.rs +++ b/crates/hir-ty/src/mir/eval/tests.rs @@ -31,9 +31,9 @@ fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalEr db.trait_environment(func_id.into()), ) .map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?; - let (result, stdout, stderr) = interpret_mir(db, body, false, None); + let (result, output) = interpret_mir(db, body, false, None); result?; - Ok((stdout, stderr)) + Ok((output.stdout().into_owned(), output.stderr().into_owned())) } fn check_pass(ra_fixture: &str) { diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 639fabc198..c02c5ef876 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -15,7 +15,7 @@ use hir_def::{ path::Path, resolver::{resolver_for_expr, HasResolver, ResolveValueResult, ValueNs}, AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId, - Lookup, TraitId, TypeOrConstParamId, + Lookup, TraitId, TupleId, TypeOrConstParamId, }; use hir_expand::name::Name; use la_arena::ArenaMap; @@ -177,7 +177,7 @@ impl MirLowerError { )?; writeln!(f, "Provided args: [")?; for g in subst.iter(Interner) { - write!(f, " {},", g.display(db).to_string())?; + write!(f, " {},", g.display(db))?; } writeln!(f, "]")?; } @@ -540,7 +540,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.write_bytes_to_place( then_target, place.clone(), - vec![1], + Box::new([1]), TyBuilder::bool(), MirSpan::Unknown, )?; @@ -548,7 +548,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.write_bytes_to_place( else_target, place, - vec![0], + Box::new([0]), TyBuilder::bool(), MirSpan::Unknown, )?; @@ -602,7 +602,7 @@ impl<'ctx> MirLowerCtx<'ctx> { generic_args, ) .intern(Interner); - let func = Operand::from_bytes(vec![], ty); + let func = Operand::from_bytes(Box::default(), ty); return self.lower_call_and_args( func, iter::once(*callee).chain(args.iter().copied()), @@ -615,7 +615,7 @@ impl<'ctx> MirLowerCtx<'ctx> { let callee_ty = self.expr_ty_after_adjustments(*callee); match &callee_ty.kind(Interner) { chalk_ir::TyKind::FnDef(..) 
=> { - let func = Operand::from_bytes(vec![], callee_ty.clone()); + let func = Operand::from_bytes(Box::default(), callee_ty.clone()); self.lower_call_and_args( func, args.iter().copied(), @@ -828,12 +828,12 @@ impl<'ctx> MirLowerCtx<'ctx> { Some(it) => it, None => { let p = sp.project( - ProjectionElem::Field(FieldId { + ProjectionElem::Field(Either::Left(FieldId { parent: variant_id, local_id: LocalFieldId::from_raw(RawIdx::from( i as u32, )), - }), + })), &mut self.result.projection_store, ); Operand::Copy(p) @@ -855,7 +855,10 @@ impl<'ctx> MirLowerCtx<'ctx> { let local_id = variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?; let place = place.project( - PlaceElem::Field(FieldId { parent: union_id.into(), local_id }), + PlaceElem::Field(Either::Left(FieldId { + parent: union_id.into(), + local_id, + })), &mut self.result.projection_store, ); self.lower_expr_to_place(*expr, place, current) @@ -1110,7 +1113,7 @@ impl<'ctx> MirLowerCtx<'ctx> { Some("start") => lp.take(), Some("end") => rp.take(), Some("exhausted") => { - Some(Operand::from_bytes(vec![0], TyBuilder::bool())) + Some(Operand::from_bytes(Box::new([0]), TyBuilder::bool())) } _ => None, }; @@ -1142,8 +1145,8 @@ impl<'ctx> MirLowerCtx<'ctx> { .map(|it| match it { ProjectionElem::Deref => ProjectionElem::Deref, ProjectionElem::Field(it) => ProjectionElem::Field(it), - ProjectionElem::TupleOrClosureField(it) => { - ProjectionElem::TupleOrClosureField(it) + ProjectionElem::ClosureField(it) => { + ProjectionElem::ClosureField(it) } ProjectionElem::ConstantIndex { offset, from_end } => { ProjectionElem::ConstantIndex { offset, from_end } @@ -1273,7 +1276,10 @@ impl<'ctx> MirLowerCtx<'ctx> { Expr::Tuple { exprs, is_assignee_expr: _ } => { for (i, expr) in exprs.iter().enumerate() { let rhs = rhs.project( - ProjectionElem::TupleOrClosureField(i), + ProjectionElem::Field(Either::Right(TupleFieldId { + tuple: TupleId(!0), // Dummy this as its unused + index: i as u32, + })), &mut self.result.projection_store, ); let Some(c) = self.lower_destructing_assignment(current, *expr, rhs, span)? @@ -1337,11 +1343,14 @@ impl<'ctx> MirLowerCtx<'ctx> { fn push_field_projection(&mut self, place: &mut Place, expr_id: ExprId) -> Result<()> { if let Expr::Field { expr, name } = &self.body[expr_id] { if let TyKind::Tuple(..) = self.expr_ty_after_adjustments(*expr).kind(Interner) { - let index = name - .as_tuple_index() - .ok_or(MirLowerError::TypeError("named field on tuple"))?; + let index = + name.as_tuple_index().ok_or(MirLowerError::TypeError("named field on tuple"))? + as u32; *place = place.project( - ProjectionElem::TupleOrClosureField(index), + ProjectionElem::Field(Either::Right(TupleFieldId { + tuple: TupleId(!0), // dummy as its unused + index, + })), &mut self.result.projection_store, ) } else { @@ -1386,46 +1395,43 @@ impl<'ctx> MirLowerCtx<'ctx> { } fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result { - let size = self - .db - .layout_of_ty(ty.clone(), self.db.trait_environment_for_body(self.owner))? 
- .size - .bytes_usize(); - let bytes = match l { + let size = || { + self.db + .layout_of_ty(ty.clone(), self.db.trait_environment_for_body(self.owner)) + .map(|it| it.size.bytes_usize()) + }; + const USIZE_SIZE: usize = mem::size_of::(); + let bytes: Box<[_]> = match l { hir_def::hir::Literal::String(b) => { - let b = b.as_bytes(); - let mut data = Vec::with_capacity(mem::size_of::() * 2); - data.extend(0usize.to_le_bytes()); - data.extend(b.len().to_le_bytes()); - let mut mm = MemoryMap::default(); - mm.insert(0, b.to_vec()); - return Ok(Operand::from_concrete_const(data, mm, ty)); + let mut data = [0; { 2 * USIZE_SIZE }]; + data[..USIZE_SIZE].copy_from_slice(&0usize.to_le_bytes()); + data[USIZE_SIZE..].copy_from_slice(&b.len().to_le_bytes()); + let mm = MemoryMap::simple(b.as_bytes().into()); + return Ok(Operand::from_concrete_const(Box::new(data), mm, ty)); } hir_def::hir::Literal::CString(b) => { - let bytes = b.iter().copied().chain(iter::once(0)).collect::>(); + let bytes = b.iter().copied().chain(iter::once(0)).collect::>(); - let mut data = Vec::with_capacity(mem::size_of::() * 2); - data.extend(0usize.to_le_bytes()); - data.extend(bytes.len().to_le_bytes()); - let mut mm = MemoryMap::default(); - mm.insert(0, bytes); - return Ok(Operand::from_concrete_const(data, mm, ty)); + let mut data = [0; { 2 * USIZE_SIZE }]; + data[..USIZE_SIZE].copy_from_slice(&0usize.to_le_bytes()); + data[USIZE_SIZE..].copy_from_slice(&bytes.len().to_le_bytes()); + let mm = MemoryMap::simple(bytes); + return Ok(Operand::from_concrete_const(Box::new(data), mm, ty)); } hir_def::hir::Literal::ByteString(b) => { - let mut data = Vec::with_capacity(mem::size_of::() * 2); - data.extend(0usize.to_le_bytes()); - data.extend(b.len().to_le_bytes()); - let mut mm = MemoryMap::default(); - mm.insert(0, b.to_vec()); - return Ok(Operand::from_concrete_const(data, mm, ty)); + let mut data = [0; { 2 * USIZE_SIZE }]; + data[..USIZE_SIZE].copy_from_slice(&0usize.to_le_bytes()); + data[USIZE_SIZE..].copy_from_slice(&b.len().to_le_bytes()); + let mm = MemoryMap::simple(b.clone()); + return Ok(Operand::from_concrete_const(Box::new(data), mm, ty)); } - hir_def::hir::Literal::Char(c) => u32::from(*c).to_le_bytes().into(), - hir_def::hir::Literal::Bool(b) => vec![*b as u8], - hir_def::hir::Literal::Int(it, _) => it.to_le_bytes()[0..size].into(), - hir_def::hir::Literal::Uint(it, _) => it.to_le_bytes()[0..size].into(), - hir_def::hir::Literal::Float(f, _) => match size { - 8 => f.into_f64().to_le_bytes().into(), - 4 => f.into_f32().to_le_bytes().into(), + hir_def::hir::Literal::Char(c) => Box::new(u32::from(*c).to_le_bytes()), + hir_def::hir::Literal::Bool(b) => Box::new([*b as u8]), + hir_def::hir::Literal::Int(it, _) => Box::from(&it.to_le_bytes()[0..size()?]), + hir_def::hir::Literal::Uint(it, _) => Box::from(&it.to_le_bytes()[0..size()?]), + hir_def::hir::Literal::Float(f, _) => match size()? 
{ + 8 => Box::new(f.into_f64().to_le_bytes()), + 4 => Box::new(f.into_f32().to_le_bytes()), _ => { return Err(MirLowerError::TypeError("float with size other than 4 or 8 bytes")) } @@ -1474,7 +1480,7 @@ impl<'ctx> MirLowerCtx<'ctx> { &mut self, prev_block: BasicBlockId, place: Place, - cv: Vec, + cv: Box<[u8]>, ty: Ty, span: MirSpan, ) -> Result<()> { @@ -2041,10 +2047,11 @@ pub fn mir_body_for_closure_query( match (it, y) { (ProjectionElem::Deref, ProjectionElem::Deref) => (), (ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (), - ( - ProjectionElem::TupleOrClosureField(it), - ProjectionElem::TupleOrClosureField(y), - ) if it == y => (), + (ProjectionElem::ClosureField(it), ProjectionElem::ClosureField(y)) + if it == y => + { + () + } _ => return false, } } @@ -2054,7 +2061,7 @@ pub fn mir_body_for_closure_query( Some(it) => { p.local = closure_local; let mut next_projs = closure_projection.clone(); - next_projs.push(PlaceElem::TupleOrClosureField(it.1)); + next_projs.push(PlaceElem::ClosureField(it.1)); let prev_projs = p.projection; if it.0.kind != CaptureKind::ByValue { next_projs.push(ProjectionElem::Deref); @@ -2063,8 +2070,8 @@ pub fn mir_body_for_closure_query( prev_projs .lookup(&store) .iter() - .cloned() - .skip(it.0.place.projections.len()), + .skip(it.0.place.projections.len()) + .cloned(), ); p.projection = store.intern(next_projs.into()); } diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs index 8c078eb4ad..cb5588a5c1 100644 --- a/crates/hir-ty/src/mir/lower/as_place.rs +++ b/crates/hir-ty/src/mir/lower/as_place.rs @@ -218,7 +218,7 @@ impl MirLowerCtx<'_> { self.push_field_projection(&mut r, expr_id)?; Ok(Some((r, current))) } - Expr::Index { base, index } => { + Expr::Index { base, index, is_assignee_expr: _ } => { let base_ty = self.expr_ty_after_adjustments(*base); let index_ty = self.expr_ty_after_adjustments(*index); if index_ty != TyBuilder::usize() diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs index 1120bb1c11..98c2e7c63b 100644 --- a/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -108,7 +108,12 @@ impl MirLowerCtx<'_> { current_else, args, *ellipsis, - (0..subst.len(Interner)).map(|i| PlaceElem::TupleOrClosureField(i)), + (0..subst.len(Interner)).map(|i| { + PlaceElem::Field(Either::Right(TupleFieldId { + tuple: TupleId(!0), // Dummy as it is unused + index: i as u32, + })) + }), &(&mut cond_place), mode, )? @@ -239,7 +244,7 @@ impl MirLowerCtx<'_> { ); } else { let c = Operand::from_concrete_const( - pattern_len.to_le_bytes().to_vec(), + pattern_len.to_le_bytes().into(), MemoryMap::default(), TyBuilder::usize(), ); @@ -566,7 +571,10 @@ impl MirLowerCtx<'_> { let field_id = variant_data.field(&x.name).ok_or(MirLowerError::UnresolvedField)?; Ok(( - PlaceElem::Field(FieldId { parent: v.into(), local_id: field_id }), + PlaceElem::Field(Either::Left(FieldId { + parent: v.into(), + local_id: field_id, + })), x.pat, )) }) @@ -574,10 +582,9 @@ impl MirLowerCtx<'_> { self.pattern_match_adt(current, current_else, it.into_iter(), cond_place, mode)? 
} AdtPatternShape::Tuple { args, ellipsis } => { - let fields = variant_data - .fields() - .iter() - .map(|(x, _)| PlaceElem::Field(FieldId { parent: v.into(), local_id: x })); + let fields = variant_data.fields().iter().map(|(x, _)| { + PlaceElem::Field(Either::Left(FieldId { parent: v.into(), local_id: x })) + }); self.pattern_match_tuple_like( current, current_else, diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs index a91f90bc24..366c2f662b 100644 --- a/crates/hir-ty/src/mir/pretty.rs +++ b/crates/hir-ty/src/mir/pretty.rs @@ -5,6 +5,7 @@ use std::{ mem, }; +use either::Either; use hir_def::{body::Body, hir::BindingId}; use hir_expand::name::Name; use la_arena::ArenaMap; @@ -298,7 +299,7 @@ impl<'a> MirPrettyCtx<'a> { f(this, local, head); w!(this, ")"); } - ProjectionElem::Field(field) => { + ProjectionElem::Field(Either::Left(field)) => { let variant_data = field.parent.variant_data(this.db.upcast()); let name = &variant_data.fields()[field.local_id].name; match field.parent { @@ -320,7 +321,11 @@ impl<'a> MirPrettyCtx<'a> { } } } - ProjectionElem::TupleOrClosureField(it) => { + ProjectionElem::Field(Either::Right(field)) => { + f(this, local, head); + w!(this, ".{}", field.index); + } + ProjectionElem::ClosureField(it) => { f(this, local, head); w!(this, ".{}", it); } diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 003ae60e8e..d270328605 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -4506,3 +4506,50 @@ fn ttt() { "#, ); } + +#[test] +fn infer_borrow() { + check_types( + r#" +//- minicore: index +pub struct SomeMap; + +pub trait Borrow { + fn borrow(&self) -> &Borrowed; +} + +impl Borrow for T { + fn borrow(&self) -> &T { + self + } +} + +impl Borrow for &T { + fn borrow(&self) -> &T { + &**self + } +} + +impl> core::ops::Index for SomeMap { + type Output = (); + + fn index(&self, _: KB) -> &() { + &() + } +} + +impl core::ops::IndexMut for SomeMap { + fn index_mut(&mut self, _: K) -> &mut () { + &mut () + } +} + +fn foo() { + let mut map = SomeMap; + map["a"] = (); + map; + //^^^ SomeMap<&str> +} +"#, + ); +} diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index d60d20f5b7..60ddc4aa86 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs @@ -1,5 +1,7 @@ //! Attributes & documentation for hir types. +use std::ops::ControlFlow; + use base_db::FileId; use hir_def::{ attr::AttrsWithOwner, @@ -13,13 +15,13 @@ use hir_expand::{ name::Name, span_map::{RealSpanMap, SpanMapRef}, }; -use hir_ty::db::HirDatabase; +use hir_ty::{db::HirDatabase, method_resolution}; use syntax::{ast, AstNode}; use crate::{ Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl, - Field, Function, GenericParam, Impl, LifetimeParam, Macro, Module, ModuleDef, Static, Struct, - Trait, TraitAlias, TypeAlias, TypeParam, Union, Variant, VariantDef, + Field, Function, GenericParam, HasCrate, Impl, LifetimeParam, Macro, Module, ModuleDef, Static, + Struct, Trait, TraitAlias, Type, TypeAlias, TypeParam, Union, Variant, VariantDef, }; pub trait HasAttrs { @@ -99,9 +101,6 @@ pub fn resolve_doc_path_on( link: &str, ns: Option, ) -> Option { - // AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()), - // AttrDefId::EnumVariantId(it) => it.parent.resolver(db.upcast()), - resolve_doc_path_on_(db, link, def.attr_id(), ns) } @@ -205,8 +204,14 @@ fn resolve_assoc_or_field( } }; - // FIXME: Resolve associated items here, e.g. `Option::map`. 
Note that associated items take - // precedence over fields. + // Resolve inherent items first, then trait items, then fields. + if let Some(assoc_item_def) = resolve_assoc_item(db, &ty, &name, ns) { + return Some(assoc_item_def); + } + + if let Some(impl_trait_item_def) = resolve_impl_trait_item(db, resolver, &ty, &name, ns) { + return Some(impl_trait_item_def); + } let variant_def = match ty.as_adt()? { Adt::Struct(it) => it.into(), @@ -216,6 +221,65 @@ fn resolve_assoc_or_field( resolve_field(db, variant_def, name, ns) } +fn resolve_assoc_item( + db: &dyn HirDatabase, + ty: &Type, + name: &Name, + ns: Option, +) -> Option { + ty.iterate_assoc_items(db, ty.krate(db), move |assoc_item| { + if assoc_item.name(db)? != *name { + return None; + } + as_module_def_if_namespace_matches(assoc_item, ns) + }) +} + +fn resolve_impl_trait_item( + db: &dyn HirDatabase, + resolver: Resolver, + ty: &Type, + name: &Name, + ns: Option, +) -> Option { + let canonical = ty.canonical(); + let krate = ty.krate(db); + let environment = resolver.generic_def().map_or_else( + || crate::TraitEnvironment::empty(krate.id).into(), + |d| db.trait_environment(d), + ); + let traits_in_scope = resolver.traits_in_scope(db.upcast()); + + let mut result = None; + + // `ty.iterate_path_candidates()` require a scope, which is not available when resolving + // attributes here. Use path resolution directly instead. + // + // FIXME: resolve type aliases (which are not yielded by iterate_path_candidates) + method_resolution::iterate_path_candidates( + &canonical, + db, + environment, + &traits_in_scope, + method_resolution::VisibleFromModule::None, + Some(name), + &mut |assoc_item_id| { + // If two traits in scope define the same item, Rustdoc links to no specific trait (for + // instance, given two methods `a`, Rustdoc simply links to `method.a` with no + // disambiguation) so we just pick the first one we find as well. + result = as_module_def_if_namespace_matches(assoc_item_id.into(), ns); + + if result.is_some() { + ControlFlow::Break(()) + } else { + ControlFlow::Continue(()) + } + }, + ); + + result +} + fn resolve_field( db: &dyn HirDatabase, def: VariantDef, @@ -228,6 +292,19 @@ fn resolve_field( def.fields(db).into_iter().find(|f| f.name(db) == name).map(DocLinkDef::Field) } +fn as_module_def_if_namespace_matches( + assoc_item: AssocItem, + ns: Option, +) -> Option { + let (def, expected_ns) = match assoc_item { + AssocItem::Function(it) => (ModuleDef::Function(it), Namespace::Values), + AssocItem::Const(it) => (ModuleDef::Const(it), Namespace::Values), + AssocItem::TypeAlias(it) => (ModuleDef::TypeAlias(it), Namespace::Types), + }; + + (ns.unwrap_or(expected_ns) == expected_ns).then(|| DocLinkDef::ModuleDef(def)) +} + fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option { // FIXME: this is not how we should get a mod path here. 
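// A self-contained sketch (not part of the patch) of the `ControlFlow`-driven
// search used by `resolve_impl_trait_item` above: a callback inspects each
// candidate, records the first one whose namespace matches, and breaks out of
// the iteration. The `Ns` enum and the candidate list are made up for the
// example; the real code walks trait items via `iterate_path_candidates`.
use std::ops::ControlFlow;

#[derive(Debug, Clone, Copy, PartialEq)]
enum Ns {
    Types,
    Values,
}

fn find_candidate(candidates: &[(&str, Ns)], name: &str, ns: Option<Ns>) -> Option<String> {
    let mut result = None;
    let mut visit = |item_name: &str, item_ns: Ns| {
        if item_name == name && ns.map_or(true, |expected| expected == item_ns) {
            result = Some(item_name.to_owned());
            ControlFlow::Break(())
        } else {
            ControlFlow::Continue(())
        }
    };
    for &(item_name, item_ns) in candidates {
        if visit(item_name, item_ns).is_break() {
            break;
        }
    }
    result
}

fn main() {
    let candidates = [("map", Ns::Values), ("Output", Ns::Types)];
    assert_eq!(find_candidate(&candidates, "map", Some(Ns::Values)), Some("map".to_owned()));
    assert_eq!(find_candidate(&candidates, "map", Some(Ns::Types)), None);
}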
let try_get_modpath = |link: &str| { diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 5847c8a9fb..9b99b141fc 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -19,8 +19,8 @@ use hir_ty::{ use crate::{ Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl, Field, Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, SelfParam, - Static, Struct, Trait, TraitAlias, TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, - Union, Variant, + Static, Struct, Trait, TraitAlias, TupleField, TyBuilder, Type, TypeAlias, TypeOrConstParam, + TypeParam, Union, Variant, }; impl HirDisplay for Function { @@ -257,6 +257,13 @@ impl HirDisplay for Field { } } +impl HirDisplay for TupleField { + fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { + write!(f, "pub {}: ", self.name().display(f.db.upcast()))?; + self.ty(f.db).hir_fmt(f) + } +} + impl HirDisplay for Variant { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write!(f, "{}", self.name(f.db).display(f.db.upcast()))?; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 09b56e1382..0266915c39 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -55,7 +55,7 @@ use hir_def::{ AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId, - Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, + Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, }; use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, MacroCallKind}; @@ -1038,6 +1038,29 @@ pub struct Field { pub(crate) id: LocalFieldId, } +#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)] +pub struct TupleField { + pub owner: DefWithBodyId, + pub tuple: TupleId, + pub index: u32, +} + +impl TupleField { + pub fn name(&self) -> Name { + Name::new_tuple_field(self.index as usize) + } + + pub fn ty(&self, db: &dyn HirDatabase) -> Type { + let ty = db.infer(self.owner).tuple_field_access_types[&self.tuple] + .as_slice(Interner) + .get(self.index as usize) + .and_then(|arg| arg.ty(Interner)) + .cloned() + .unwrap_or_else(|| TyKind::Error.intern(Interner)); + Type { env: db.trait_environment_for_body(self.owner), ty } + } +} + #[derive(Debug, PartialEq, Eq)] pub enum FieldSource { Named(ast::RecordField), @@ -1070,7 +1093,7 @@ impl Field { pub fn layout(&self, db: &dyn HirDatabase) -> Result { db.layout_of_ty( - self.ty(db).ty.clone(), + self.ty(db).ty, db.trait_environment(match hir_def::VariantId::from(self.parent) { hir_def::VariantId::EnumVariantId(id) => GenericDefId::EnumVariantId(id), hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()), @@ -1831,7 +1854,7 @@ impl DefWithBody { let local = Local { parent: self.into(), binding_id }; match (need_mut, local.is_mut(db)) { (mir::MutabilityReason::Unused, _) => { - let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_")); + let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with('_')); if !should_ignore { acc.push(UnusedVariable { local }.into()) } @@ -1856,7 +1879,7 @@ impl DefWithBody { } (mir::MutabilityReason::Not, true) => { if 
!infer.mutated_bindings_in_closure.contains(&binding_id) { - let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_")); + let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with('_')); if !should_ignore { acc.push(UnusedMut { local }.into()) } @@ -2160,7 +2183,7 @@ impl Function { return r; } }; - let (result, stdout, stderr) = interpret_mir(db, body, false, None); + let (result, output) = interpret_mir(db, body, false, None); let mut text = match result { Ok(_) => "pass".to_string(), Err(e) => { @@ -2169,10 +2192,12 @@ impl Function { r } }; + let stdout = output.stdout().into_owned(); if !stdout.is_empty() { text += "\n--------- stdout ---------\n"; text += &stdout; } + let stderr = output.stdout().into_owned(); if !stderr.is_empty() { text += "\n--------- stderr ---------\n"; text += &stderr; @@ -3648,7 +3673,6 @@ impl Closure { let (captures, _) = infer.closure_info(&self.id); captures .iter() - .cloned() .map(|capture| Type { env: db.trait_environment_for_body(owner), ty: capture.ty(&self.subst), @@ -4121,6 +4145,10 @@ impl Type { } } + pub(crate) fn canonical(&self) -> Canonical { + hir_ty::replace_errors_with_variables(&self.ty) + } + /// Returns types that this type dereferences to (including this type itself). The returned /// iterator won't yield the same type more than once even if the deref chain contains a cycle. pub fn autoderef(&self, db: &dyn HirDatabase) -> impl Iterator + '_ { diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index fdc604a006..f51fe80931 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -40,7 +40,7 @@ use crate::{ Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Struct, ToolModule, Trait, - Type, TypeAlias, TypeParam, VariantDef, + TupleField, Type, TypeAlias, TypeParam, VariantDef, }; pub enum DescendPreference { @@ -428,7 +428,7 @@ impl<'db> SemanticsImpl<'db> { if let Some(original_string) = ast::String::cast(original_token.clone()) { if let Some(quote) = original_string.open_quote_text_range() { return self - .descend_into_macros(DescendPreference::SameText, original_token.clone()) + .descend_into_macros(DescendPreference::SameText, original_token) .into_iter() .find_map(|token| { self.resolve_offset_in_format_args( @@ -1085,14 +1085,14 @@ impl<'db> SemanticsImpl<'db> { self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call) } - pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option { + pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option> { self.analyze(field.syntax())?.resolve_field(self.db, field) } pub fn resolve_field_fallback( &self, field: &ast::FieldExpr, - ) -> Option> { + ) -> Option, Function>> { self.analyze(field.syntax())?.resolve_field_fallback(self.db, field) } diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 54b4d81012..73f8db762a 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -50,7 +50,7 @@ use triomphe::Arc; use crate::{ db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr, BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static, - Struct, ToolModule, Trait, TraitAlias, Type, TypeAlias, Variant, + Struct, ToolModule, Trait, 
TraitAlias, TupleField, Type, TypeAlias, Variant, }; /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of @@ -297,7 +297,11 @@ impl SourceAnalyzer { Some((f_in_trait, substs)) => Some(Either::Left( self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into(), )), - None => inference_result.field_resolution(expr_id).map(Into::into).map(Either::Right), + None => inference_result + .field_resolution(expr_id) + .and_then(Either::left) + .map(Into::into) + .map(Either::Right), } } @@ -305,20 +309,28 @@ impl SourceAnalyzer { &self, db: &dyn HirDatabase, field: &ast::FieldExpr, - ) -> Option { + ) -> Option> { + let &(def, ..) = self.def.as_ref()?; let expr_id = self.expr_id(db, &field.clone().into())?; - self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into()) + self.infer.as_ref()?.field_resolution(expr_id).map(|it| { + it.map_either(Into::into, |f| TupleField { owner: def, tuple: f.tuple, index: f.index }) + }) } pub(crate) fn resolve_field_fallback( &self, db: &dyn HirDatabase, field: &ast::FieldExpr, - ) -> Option> { + ) -> Option, Function>> { + let &(def, ..) = self.def.as_ref()?; let expr_id = self.expr_id(db, &field.clone().into())?; let inference_result = self.infer.as_ref()?; match inference_result.field_resolution(expr_id) { - Some(field) => Some(Either::Left(field.into())), + Some(field) => Some(Either::Left(field.map_either(Into::into, |f| TupleField { + owner: def, + tuple: f.tuple, + index: f.index, + }))), None => inference_result.method_resolution(expr_id).map(|(f, substs)| { Either::Right(self.resolve_impl_method_or_trait_def(db, f, substs).into()) }), diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs index 4da0dfba67..841ddfb9c4 100644 --- a/crates/hir/src/symbols.rs +++ b/crates/hir/src/symbols.rs @@ -18,11 +18,11 @@ use crate::{Module, ModuleDef, Semantics}; /// possible. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct FileSymbol { - // even though name can be derived from the def, we store it for efficiency pub name: SmolStr, pub def: ModuleDef, pub loc: DeclarationLocation, pub container_name: Option, + /// Whether this symbol is a doc alias for the original symbol. pub is_alias: bool, pub is_assoc: bool, } @@ -163,11 +163,9 @@ impl<'a> SymbolCollector<'a> { } // Record renamed imports. - // In case it imports multiple items under different namespaces we just pick one arbitrarily + // FIXME: In case it imports multiple items under different namespaces we just pick one arbitrarily // for now. 
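// A short sketch (not part of the patch) of consuming the nested `Either`
// shape introduced for field resolution above: the left side now distinguishes
// named fields from positional tuple fields, and the right side is the method
// fallback. The string and `u32` payloads stand in for the real `Field`,
// `TupleField` and `Function` types; the `either` crate is already a
// workspace dependency.
use either::Either;

fn describe(resolution: Either<Either<&str, u32>, &str>) -> String {
    match resolution {
        // A named field on a struct, union or enum variant.
        Either::Left(Either::Left(field)) => format!("field `{field}`"),
        // A positional field on a tuple, addressed by index.
        Either::Left(Either::Right(index)) => format!("tuple field `{index}`"),
        // `receiver.name` resolved to a method instead (the fallback case).
        Either::Right(method) => format!("method `{method}`"),
    }
}

fn main() {
    println!("{}", describe(Either::Left(Either::Right(0))));
    println!("{}", describe(Either::Right("len")));
}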
for id in scope.imports() { - let loc = id.import.lookup(self.db.upcast()); - loc.id.item_tree(self.db.upcast()); let source = id.import.child_source(self.db.upcast()); let Some(use_tree_src) = source.value.get(id.idx) else { continue }; let Some(rename) = use_tree_src.rename() else { continue }; diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs index 1f785b5d0a..7b71d9b869 100644 --- a/crates/ide-assists/src/handlers/auto_import.rs +++ b/crates/ide-assists/src/handlers/auto_import.rs @@ -89,12 +89,14 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel}; // ``` pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; - let mut proposed_imports = import_assets.search_for_imports( - &ctx.sema, - ctx.config.insert_use.prefix_kind, - ctx.config.prefer_no_std, - ctx.config.prefer_no_std, - ); + let mut proposed_imports: Vec<_> = import_assets + .search_for_imports( + &ctx.sema, + ctx.config.insert_use.prefix_kind, + ctx.config.prefer_no_std, + ctx.config.prefer_no_std, + ) + .collect(); if proposed_imports.is_empty() { return None; } @@ -113,6 +115,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< )?; // we aren't interested in different namespaces + proposed_imports.sort_by(|a, b| a.import_path.cmp(&b.import_path)); proposed_imports.dedup_by(|a, b| a.import_path == b.import_path); let current_node = match ctx.covering_element() { diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs index b7b00e7ed0..dc1952c3ff 100644 --- a/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -301,7 +301,7 @@ fn replace_usages( // add imports across modules where needed if let Some((import_scope, path)) = import_data { - let scope = match import_scope.clone() { + let scope = match import_scope { ImportScope::File(it) => ImportScope::File(edit.make_mut(it)), ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)), ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)), @@ -329,7 +329,7 @@ fn augment_references_with_imports( references .into_iter() .filter_map(|FileReference { range, name, .. 
}| { - let name = name.clone().into_name_like()?; + let name = name.into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| (range, name, scope.module())) }) .map(|(range, name, ref_module)| { diff --git a/crates/ide-assists/src/handlers/convert_let_else_to_match.rs b/crates/ide-assists/src/handlers/convert_let_else_to_match.rs index 5f7056b9c1..79c34c14da 100644 --- a/crates/ide-assists/src/handlers/convert_let_else_to_match.rs +++ b/crates/ide-assists/src/handlers/convert_let_else_to_match.rs @@ -1,5 +1,6 @@ use hir::Semantics; use ide_db::RootDatabase; +use syntax::ast::RangeItem; use syntax::ast::{edit::AstNodeEdit, AstNode, HasName, LetStmt, Name, Pat}; use syntax::T; diff --git a/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs b/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs index 399f87c8f5..c30f3e1c3b 100644 --- a/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs +++ b/crates/ide-assists/src/handlers/convert_nested_function_to_closure.rs @@ -49,8 +49,8 @@ pub(crate) fn convert_nested_function_to_closure( target, |edit| { let params = ¶m_list.syntax().text().to_string(); - let params = params.strip_prefix("(").unwrap_or(params); - let params = params.strip_suffix(")").unwrap_or(params); + let params = params.strip_prefix('(').unwrap_or(params); + let params = params.strip_suffix(')').unwrap_or(params); let mut body = body.to_string(); if !has_semicolon(&function) { diff --git a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index 79b46d6612..41366658a7 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -190,7 +190,7 @@ fn augment_references_with_imports( ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module())) }) .map(|(name, ref_module)| { - let new_name = edit.make_mut(name.clone()); + let new_name = edit.make_mut(name); // if the referenced module is not the same as the target one and has not been seen before, add an import let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module diff --git a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index 37db27a8fc..65e2a01847 100644 --- a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -6,6 +6,7 @@ use ide_db::{ defs::Definition, helpers::mod_path_to_ast, imports::insert_use::{insert_use, ImportScope, InsertUseConfig}, + path_transform::PathTransform, search::FileReference, FxHashSet, RootDatabase, }; @@ -105,6 +106,16 @@ pub(crate) fn extract_struct_from_enum_variant( .generic_param_list() .and_then(|known_generics| extract_generic_params(&known_generics, &field_list)); let generics = generic_params.as_ref().map(|generics| generics.clone_for_update()); + + // resolve GenericArg in field_list to actual type + let field_list = field_list.clone_for_update(); + if let Some((target_scope, source_scope)) = + ctx.sema.scope(enum_ast.syntax()).zip(ctx.sema.scope(field_list.syntax())) + { + PathTransform::generic_transformation(&target_scope, &source_scope) + .apply(field_list.syntax()); + } + let def = create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast); @@ -244,8 +255,6 @@ fn create_struct_def( // for fields without 
any existing visibility, use visibility of enum let field_list: ast::FieldList = match field_list { Either::Left(field_list) => { - let field_list = field_list.clone_for_update(); - if let Some(vis) = &enum_vis { field_list .fields() @@ -254,11 +263,9 @@ fn create_struct_def( .for_each(|it| insert_vis(it.syntax(), vis.syntax())); } - field_list.into() + field_list.clone().into() } Either::Right(field_list) => { - let field_list = field_list.clone_for_update(); - if let Some(vis) = &enum_vis { field_list .fields() @@ -267,7 +274,7 @@ fn create_struct_def( .for_each(|it| insert_vis(it.syntax(), vis.syntax())); } - field_list.into() + field_list.clone().into() } }; field_list.reindent_to(IndentLevel::single()); @@ -425,6 +432,59 @@ mod tests { use super::*; + #[test] + fn issue_16197() { + check_assist( + extract_struct_from_enum_variant, + r#" +enum Foo { + Bar $0{ node: Box }, + Nil, +} +"#, + r#" +struct Bar{ node: Box } + +enum Foo { + Bar(Bar), + Nil, +} +"#, + ); + check_assist( + extract_struct_from_enum_variant, + r#" +enum Foo { + Bar $0{ node: Box, a: Arc> }, + Nil, +} +"#, + r#" +struct Bar{ node: Box, a: Arc> } + +enum Foo { + Bar(Bar), + Nil, +} +"#, + ); + check_assist( + extract_struct_from_enum_variant, + r#" +enum Foo { + Nil(Box$0, Arc>), +} +"#, + r#" +struct Nil(Box, Arc>); + +enum Foo { + Nil(Nil), +} +"#, + ); + } + #[test] fn test_extract_struct_several_fields_tuple() { check_assist( diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs index 874b81d3b6..0b3bd0bed6 100644 --- a/crates/ide-assists/src/handlers/extract_variable.rs +++ b/crates/ide-assists/src/handlers/extract_variable.rs @@ -112,7 +112,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let insert_place = edit.make_syntax_mut(place); // Adjust ws to insert depending on if this is all inline or on separate lines - let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with("\n")) { + let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with('\n')) { format!("\n{indent_to}") } else { format!(" ") diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs index db1e0ceaec..dc02aaf9af 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -147,7 +147,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' None => { let name = &strukt_name.to_string(); let params = strukt.generic_param_list(); - let ty_params = params.clone(); + let ty_params = params; let where_clause = strukt.where_clause(); let impl_def = make::impl_( diff --git a/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/crates/ide-assists/src/handlers/generate_delegate_trait.rs index 0d34502add..339c3ac71e 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -17,7 +17,7 @@ use syntax::{ self, edit::{self, AstNodeEdit}, make, AssocItem, GenericArgList, GenericParamList, HasGenericParams, HasName, - HasTypeBounds, HasVisibility as astHasVisibility, Path, + HasTypeBounds, HasVisibility as astHasVisibility, Path, WherePred, }, ted::{self, Position}, AstNode, NodeOrToken, SmolStr, SyntaxKind, @@ -217,9 +217,9 @@ impl Struct { }; acc.add_group( - &GroupLabel(format!("Generate delegate impls for field `{}`", field.name)), + 
&GroupLabel(format!("Generate delegate trait impls for field `{}`", field.name)), AssistId("generate_delegate_trait", ide_db::assists::AssistKind::Generate), - format!("Generate delegate impl `{}` for `{}`", signature, field.name), + format!("Generate delegate trait impl `{}` for `{}`", signature, field.name), field.range, |builder| { builder.insert( @@ -243,12 +243,12 @@ fn generate_impl( let db = ctx.db(); let ast_strukt = &strukt.strukt; let strukt_ty = make::ty_path(make::ext::ident_path(&strukt.name.to_string())); + let strukt_params = ast_strukt.generic_param_list(); match delegee { Delegee::Bound(delegee) => { let bound_def = ctx.sema.source(delegee.to_owned())?.value; let bound_params = bound_def.generic_param_list(); - let strukt_params = ast_strukt.generic_param_list(); delegate = make::impl_trait( delegee.is_unsafe(db), @@ -266,11 +266,8 @@ fn generate_impl( .clone_for_update(); // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths - let qualified_path_type = make::path_from_text(&format!( - "<{} as {}>", - field_ty.to_string(), - delegate.trait_()?.to_string() - )); + let qualified_path_type = + make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?)); let delegate_assoc_items = delegate.get_or_create_assoc_item_list(); match bound_def.assoc_item_list() { @@ -295,63 +292,73 @@ fn generate_impl( } Delegee::Impls(trait_, old_impl) => { let old_impl = ctx.sema.source(old_impl.to_owned())?.value; + let old_impl_params = old_impl.generic_param_list(); + + // 1) Resolve conflicts between generic parameters in old_impl and + // those in strukt. + // + // These generics parameters will also be used in `field_ty` and + // `where_clauses`, so we should substitude arguments in them as well. + let strukt_params = resolve_name_conflicts(strukt_params, &old_impl_params); + let (field_ty, ty_where_clause) = match &strukt_params { + Some(strukt_params) => { + let args = strukt_params.to_generic_args(); + let field_ty = rename_strukt_args(ctx, ast_strukt, field_ty, &args)?; + let where_clause = ast_strukt + .where_clause() + .and_then(|wc| Some(rename_strukt_args(ctx, ast_strukt, &wc, &args)?)); + (field_ty, where_clause) + } + None => (field_ty.clone_for_update(), None), + }; + + // 2) Handle instantiated generics in `field_ty`. + + // 2.1) Some generics used in `self_ty` may be instantiated, so they + // are no longer generics, we should remove and instantiate those + // generics in advance. // `old_trait_args` contains names of generic args for trait in `old_impl` - let old_trait_args = old_impl + let old_impl_trait_args = old_impl .trait_()? .generic_arg_list() .map(|l| l.generic_args().map(|arg| arg.to_string())) .map_or_else(|| FxHashSet::default(), |it| it.collect()); - let old_impl_params = old_impl.generic_param_list(); + let trait_gen_params = remove_instantiated_params( + &old_impl.self_ty()?, + old_impl_params.clone(), + &old_impl_trait_args, + ); - // Resolve conflicts with generic parameters in strukt. - // These generics parameters will also be used in `field_ty` and `where_clauses`, - // so we should substitude arguments in them as well. 
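Aside on step 1) (`resolve_name_conflicts` plus `rename_strukt_args`): struct generics that clash with the old impl's generics are renamed, and the renaming is then substituted into the field type and where clauses. A minimal, std-only sketch of that idea, with plain strings standing in for `ast::GenericParamList`; the renaming scheme (appending a counter) is an assumption for illustration only.

use std::collections::{HashMap, HashSet};

// Rename struct params that clash with impl params, returning the renamed list
// plus the substitution map that must be applied to field types / where clauses.
fn resolve_name_conflicts(
    strukt_params: &[&str],
    impl_params: &[&str],
) -> (Vec<String>, HashMap<String, String>) {
    let taken: HashSet<&str> = impl_params.iter().copied().collect();
    let mut renamed = Vec::new();
    let mut substs = HashMap::new();
    for &p in strukt_params {
        if taken.contains(p) {
            // Pick a fresh name by appending digits until it is unused.
            let mut fresh = format!("{p}0");
            let mut n = 0;
            while taken.contains(fresh.as_str()) || strukt_params.contains(&fresh.as_str()) {
                n += 1;
                fresh = format!("{p}{n}");
            }
            substs.insert(p.to_string(), fresh.clone());
            renamed.push(fresh);
        } else {
            renamed.push(p.to_string());
        }
    }
    (renamed, substs)
}

fn main() {
    // Struct is `S<T, U>`, old impl is `impl<T, V> Trait for Field<T>`.
    let (params, substs) = resolve_name_conflicts(&["T", "U"], &["T", "V"]);
    println!("{params:?}"); // ["T0", "U"]
    println!("{substs:?}"); // {"T": "T0"}: apply to the field type and where clauses
}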
- let (renamed_strukt_params, field_ty, ty_where_clause) = if let Some(strukt_params) = - resolve_conflicts_for_strukt(ast_strukt, old_impl_params.as_ref()) - { - let strukt_args = strukt_params.to_generic_args(); - let field_ty = - subst_name_in_strukt(ctx, ast_strukt, field_ty, strukt_args.clone())?; - let wc = ast_strukt - .where_clause() - .and_then(|wc| Some(subst_name_in_strukt(ctx, ast_strukt, &wc, strukt_args)?)); - (Some(strukt_params), field_ty, wc) - } else { - (None, field_ty.clone_for_update(), None) - }; - - // Some generics used in `field_ty` may be instantiated, so they are no longer - // `generics`. We should remove them from generics params, and use the rest params. - let trait_gen_params = - remove_instantiated_params(&old_impl.self_ty()?, old_impl_params, &old_trait_args); - - // Generate generic args that applied to current impl, this step will also remove unused params - let args_for_impl = - get_args_for_impl(&old_impl, &field_ty, &trait_gen_params, &old_trait_args); + // 2.2) Generate generic args applied on impl. + let transform_args = generate_args_for_impl( + old_impl_params, + &old_impl.self_ty()?, + &field_ty, + &trait_gen_params, + &old_impl_trait_args, + ); + // 2.3) Instantiate generics with `transform_impl`, this step also + // remove unused params. let mut trait_gen_args = old_impl.trait_()?.generic_arg_list(); - if let Some(arg_list) = &mut trait_gen_args { - *arg_list = arg_list.clone_for_update(); - transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &arg_list.syntax())?; + if let Some(trait_args) = &mut trait_gen_args { + *trait_args = trait_args.clone_for_update(); + transform_impl(ctx, ast_strukt, &old_impl, &transform_args, &trait_args.syntax())?; } - let mut type_gen_args = - renamed_strukt_params.clone().map(|params| params.to_generic_args()); - if let Some(type_args) = &mut type_gen_args { - *type_args = type_args.clone_for_update(); - transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &type_args.syntax())?; - } + let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args()); let path_type = make::ty(&trait_.name(db).to_smol_str()).clone_for_update(); - transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &path_type.syntax())?; + transform_impl(ctx, ast_strukt, &old_impl, &transform_args, &path_type.syntax())?; + // 3) Generate delegate trait impl delegate = make::impl_trait( trait_.is_unsafe(db), trait_gen_params, trait_gen_args, - renamed_strukt_params, + strukt_params, type_gen_args, trait_.is_auto(db), path_type, @@ -363,30 +370,26 @@ fn generate_impl( .clone_for_update(); // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths - let qualified_path_type = make::path_from_text(&format!( - "<{} as {}>", - field_ty.to_string(), - delegate.trait_()?.to_string() - )); + let qualified_path_type = + make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?)); + // 4) Transform associated items in delegte trait impl let delegate_assoc_items = delegate.get_or_create_assoc_item_list(); for item in old_impl .get_or_create_assoc_item_list() .assoc_items() .filter(|item| matches!(item, AssocItem::MacroCall(_)).not()) { - let assoc = process_assoc_item( - transform_assoc_item(ctx, ast_strukt, &old_impl, &args_for_impl, item)?, - qualified_path_type.clone(), - &field_name, - )?; + let item = item.clone_for_update(); + transform_impl(ctx, ast_strukt, &old_impl, &transform_args, item.syntax())?; + let assoc = process_assoc_item(item, qualified_path_type.clone(), &field_name)?; 
delegate_assoc_items.add_item(assoc); } - // Remove unused where clauses + // 5) Remove useless where clauses if let Some(wc) = delegate.where_clause() { - remove_useless_where_clauses(&delegate, wc)?; + remove_useless_where_clauses(&delegate.trait_()?, &delegate.self_ty()?, wc); } } } @@ -394,32 +397,6 @@ fn generate_impl( Some(delegate) } -fn transform_assoc_item( - ctx: &AssistContext<'_>, - strukt: &ast::Struct, - old_impl: &ast::Impl, - args: &Option, - item: AssocItem, -) -> Option { - let source_scope = ctx.sema.scope(&item.syntax()).unwrap(); - let target_scope = ctx.sema.scope(&strukt.syntax())?; - let hir_old_impl = ctx.sema.to_impl_def(old_impl)?; - let item = item.clone_for_update(); - let transform = args.as_ref().map_or_else( - || PathTransform::generic_transformation(&target_scope, &source_scope), - |args| { - PathTransform::impl_transformation( - &target_scope, - &source_scope, - hir_old_impl, - args.clone(), - ) - }, - ); - transform.apply(&item.syntax()); - Some(item) -} - fn transform_impl( ctx: &AssistContext<'_>, strukt: &ast::Struct, @@ -463,11 +440,11 @@ fn remove_instantiated_params( .segments() .filter_map(|seg| seg.generic_arg_list()) .flat_map(|it| it.generic_args()) - // However, if the param is also used in the trait arguments, it shouldn't be removed. + // However, if the param is also used in the trait arguments, + // it shouldn't be removed now, which will be instantiated in + // later `path_transform` .filter(|arg| !old_trait_args.contains(&arg.to_string())) - .for_each(|arg| { - new_gpl.remove_generic_arg(&arg); - }); + .for_each(|arg| new_gpl.remove_generic_arg(&arg)); (new_gpl.generic_params().count() > 0).then_some(new_gpl) }) } @@ -475,49 +452,37 @@ fn remove_instantiated_params( } } -fn remove_useless_where_clauses(delegate: &ast::Impl, wc: ast::WhereClause) -> Option<()> { - let trait_args = - delegate.trait_()?.generic_arg_list().map(|trait_args| trait_args.generic_args()); - let strukt_args = - delegate.self_ty()?.generic_arg_list().map(|strukt_args| strukt_args.generic_args()); - let used_generic_names = match (trait_args, strukt_args) { - (None, None) => None, - (None, Some(y)) => Some(y.map(|arg| arg.to_string()).collect::>()), - (Some(x), None) => Some(x.map(|arg| arg.to_string()).collect::>()), - (Some(x), Some(y)) => Some(x.chain(y).map(|arg| arg.to_string()).collect::>()), - }; +fn remove_useless_where_clauses(trait_ty: &ast::Type, self_ty: &ast::Type, wc: ast::WhereClause) { + let live_generics = [trait_ty, self_ty] + .into_iter() + .flat_map(|ty| ty.generic_arg_list()) + .flat_map(|gal| gal.generic_args()) + .map(|x| x.to_string()) + .collect::>(); - // Keep clauses that have generic clauses after substitution, and remove the rest - if let Some(used_generic_names) = used_generic_names { - wc.predicates() - .filter(|pred| { - pred.syntax() - .descendants_with_tokens() - .filter_map(|e| e.into_token()) - .find(|e| { - e.kind() == SyntaxKind::IDENT && used_generic_names.contains(&e.to_string()) - }) - .is_none() - }) - .for_each(|pred| { - wc.remove_predicate(pred); - }); - } else { - wc.predicates().for_each(|pred| wc.remove_predicate(pred)); - } + // Keep where-clauses that have generics after substitution, and remove the + // rest. 
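Aside on the pruning rule in the comment above: a where-predicate survives only if it still mentions a generic that is live in the trait or self type; everything else is dropped. A std-only sketch, with strings standing in for `ast::WherePred` and the IDENT token walk replaced by a crude identifier split:

use std::collections::HashSet;

// Keep only predicates that mention at least one live generic name.
fn prune_where_clauses(predicates: Vec<String>, live_generics: &HashSet<&str>) -> Vec<String> {
    predicates
        .into_iter()
        .filter(|pred| {
            // Crude identifier scan; the real code walks syntax tokens of kind IDENT.
            pred.split(|c: char| !c.is_alphanumeric() && c != '_')
                .any(|ident| live_generics.contains(ident))
        })
        .collect()
}

fn main() {
    // After substitution, only `T` is still a generic of the delegate impl.
    let live: HashSet<&str> = ["T"].into_iter().collect();
    let preds = vec!["T: Clone".to_string(), "U: Default".to_string()];
    println!("{:?}", prune_where_clauses(preds, &live)); // ["T: Clone"]
}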
+ let has_live_generics = |pred: &WherePred| { + pred.syntax() + .descendants_with_tokens() + .filter_map(|e| e.into_token()) + .any(|e| e.kind() == SyntaxKind::IDENT && live_generics.contains(&e.to_string())) + .not() + }; + wc.predicates().filter(has_live_generics).for_each(|pred| wc.remove_predicate(pred)); if wc.predicates().count() == 0 { // Remove useless whitespaces - wc.syntax() - .siblings_with_tokens(syntax::Direction::Prev) - .skip(1) - .take_while(|node_or_tok| node_or_tok.kind() == SyntaxKind::WHITESPACE) - .for_each(|ws| ted::remove(ws)); - wc.syntax() - .siblings_with_tokens(syntax::Direction::Next) - .skip(1) - .take_while(|node_or_tok| node_or_tok.kind() == SyntaxKind::WHITESPACE) + [syntax::Direction::Prev, syntax::Direction::Next] + .into_iter() + .flat_map(|dir| { + wc.syntax() + .siblings_with_tokens(dir) + .skip(1) + .take_while(|node_or_tok| node_or_tok.kind() == SyntaxKind::WHITESPACE) + }) .for_each(|ws| ted::remove(ws)); + ted::insert( ted::Position::after(wc.syntax()), NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)), @@ -525,84 +490,63 @@ fn remove_useless_where_clauses(delegate: &ast::Impl, wc: ast::WhereClause) -> O // Remove where clause ted::remove(wc.syntax()); } - - Some(()) } -fn get_args_for_impl( - old_impl: &ast::Impl, +// Generate generic args that should be applied to the current impl. +// +// For example, say we have implementation `impl Trait for B`, +// and `b: B` in struct `S`. Then the `A` should be instantiated to `T`. +// While the last two generic args `B` and `C` don't change, they remain +// ``. So we apply `` as generic arguments to impl. +fn generate_args_for_impl( + old_impl_gpl: Option, + self_ty: &ast::Type, field_ty: &ast::Type, trait_params: &Option, old_trait_args: &FxHashSet, ) -> Option { - // Generate generic args that should be apply to current impl - // - // For exmaple, if we have `impl Trait for B`, and `b: B` in `S`, - // then the generic `A` should be renamed to `T`. While the last two generic args - // doesn't change, it renames . So we apply `` as generic arguments - // to impl.
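Aside on `generate_args_for_impl`: the old impl's self-type args are zipped with the field type's args to build a substitution, which is then applied to the old impl's full parameter list. A std-only sketch of the comment's worked example, assuming the elided generics read `impl<A, B, C> Trait for B<A>` with a field `b: B<T>` in `S<T>` (the angle-bracketed parts were lost in the text above):

use std::collections::HashMap;

// Substitute the old impl's generic args using the pairing of the impl's
// self-type args (e.g. `B<A>`) with the delegated field's type args (e.g. `B<T>`).
fn generate_args_for_impl(
    old_impl_args: &[&str], // generic params of the old impl, e.g. ["A", "B", "C"]
    self_ty_args: &[&str],  // args of the impl's self type, e.g. ["A"]
    field_ty_args: &[&str], // args of the field's type in the struct, e.g. ["T"]
) -> Vec<String> {
    let substs: HashMap<&str, &str> =
        self_ty_args.iter().copied().zip(field_ty_args.iter().copied()).collect();
    old_impl_args
        .iter()
        .map(|&arg| substs.get(arg).copied().unwrap_or(arg).to_string())
        .collect()
}

fn main() {
    // `impl<A, B, C> Trait for B<A>` with field `b: B<T>` in `S<T>` -> apply `<T, B, C>`.
    let args = generate_args_for_impl(&["A", "B", "C"], &["A"], &["T"]);
    println!("{args:?}"); // ["T", "B", "C"]
}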
- let old_impl_params = old_impl.generic_param_list(); - let self_ty = old_impl.self_ty(); + let Some(old_impl_args) = old_impl_gpl.map(|gpl| gpl.to_generic_args().generic_args()) else { + return None; + }; + // Create pairs of the args of `self_ty` and corresponding `field_ty` to + // form the substitution list + let mut arg_substs = FxHashMap::default(); - if let (Some(old_impl_gpl), Some(self_ty)) = (old_impl_params, self_ty) { - // Make pair of the arguments of `field_ty` and `old_strukt_args` to - // get the list for substitution - let mut arg_substs = FxHashMap::default(); - - match field_ty { - field_ty @ ast::Type::PathType(_) => { - let field_args = field_ty.generic_arg_list(); - if let (Some(field_args), Some(old_impl_args)) = - (field_args, self_ty.generic_arg_list()) - { - field_args.generic_args().zip(old_impl_args.generic_args()).for_each( - |(field_arg, impl_arg)| { - arg_substs.entry(impl_arg.to_string()).or_insert(field_arg); - }, - ) - } + match field_ty { + field_ty @ ast::Type::PathType(_) => { + let field_args = field_ty.generic_arg_list().map(|gal| gal.generic_args()); + let self_ty_args = self_ty.generic_arg_list().map(|gal| gal.generic_args()); + if let (Some(field_args), Some(self_ty_args)) = (field_args, self_ty_args) { + self_ty_args.zip(field_args).for_each(|(self_ty_arg, field_arg)| { + arg_substs.entry(self_ty_arg.to_string()).or_insert(field_arg); + }) } - _ => {} } - - let args = old_impl_gpl - .to_generic_args() - .generic_args() - .map(|old_arg| { - arg_substs.get(&old_arg.to_string()).map_or_else( - || old_arg.clone(), - |replace_with| { - // The old_arg will be replaced, so it becomes redundant - let old_arg_name = old_arg.to_string(); - if old_trait_args.contains(&old_arg_name) { - // However, we should check type bounds and where clauses on old_arg, - // if it has type bound, we should keep the type bound. 
- // match trait_params.and_then(|params| params.remove_generic_arg(&old_arg)) { - // Some(ast::GenericParam::TypeParam(ty)) => { - // ty.type_bound_list().and_then(|bounds| ) - // } - // _ => {} - // } - if let Some(params) = trait_params { - params.remove_generic_arg(&old_arg); - } - } - replace_with.clone() - }, - ) - }) - .collect_vec(); - args.is_empty().not().then(|| make::generic_arg_list(args.into_iter())) - } else { - None + _ => {} } + + let args = old_impl_args + .map(|old_arg| { + arg_substs.get(&old_arg.to_string()).map_or_else( + || old_arg.clone(), + |replace_with| { + // The old_arg will be replaced, so it becomes redundant + if trait_params.is_some() && old_trait_args.contains(&old_arg.to_string()) { + trait_params.as_ref().unwrap().remove_generic_arg(&old_arg) + } + replace_with.clone() + }, + ) + }) + .collect_vec(); + args.is_empty().not().then(|| make::generic_arg_list(args.into_iter())) } -fn subst_name_in_strukt( +fn rename_strukt_args( ctx: &AssistContext<'_>, strukt: &ast::Struct, item: &N, - args: GenericArgList, + args: &GenericArgList, ) -> Option where N: ast::AstNode, @@ -611,9 +555,11 @@ where let hir_adt = hir::Adt::from(hir_strukt); let item = item.clone_for_update(); - let item_scope = ctx.sema.scope(item.syntax())?; - let transform = PathTransform::adt_transformation(&item_scope, &item_scope, hir_adt, args); + let scope = ctx.sema.scope(item.syntax())?; + + let transform = PathTransform::adt_transformation(&scope, &scope, hir_adt, args.clone()); transform.apply(&item.syntax()); + Some(item) } @@ -627,16 +573,16 @@ fn has_self_type(trait_: hir::Trait, ctx: &AssistContext<'_>) -> Option<()> { .map(|_| ()) } -fn resolve_conflicts_for_strukt( - strukt: &ast::Struct, - old_impl_params: Option<&ast::GenericParamList>, +fn resolve_name_conflicts( + strukt_params: Option, + old_impl_params: &Option, ) -> Option { - match (strukt.generic_param_list(), old_impl_params) { + match (strukt_params, old_impl_params) { (Some(old_strukt_params), Some(old_impl_params)) => { let params = make::generic_param_list(std::iter::empty()).clone_for_update(); for old_strukt_param in old_strukt_params.generic_params() { - // Get old name from `strukt`` + // Get old name from `strukt` let mut name = SmolStr::from(match &old_strukt_param { ast::GenericParam::ConstParam(c) => c.name()?.to_string(), ast::GenericParam::LifetimeParam(l) => { @@ -807,7 +753,7 @@ fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option ast::Path { - make::path_from_text(&format!("{}::{}", qual_path_ty.to_string(), path_expr_seg.to_string())) + make::path_from_text(&format!("{}::{}", qual_path_ty, path_expr_seg)) } #[cfg(test)] diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index 5bb200e84a..50528e1caa 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -432,7 +432,7 @@ fn get_fn_target( } None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?, }; - Some((target.clone(), file)) + Some((target, file)) } fn get_method_target( diff --git a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs index cb8ef39565..d90d366ffe 100644 --- a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs +++ b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs @@ -47,7 +47,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> let 
impl_def = ctx.find_node_at_offset::()?.clone_for_update(); let trait_ = impl_def.trait_()?; - if let ast::Type::PathType(trait_path) = trait_.clone() { + if let ast::Type::PathType(trait_path) = trait_ { let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?; let scope = ctx.sema.scope(trait_path.syntax())?; if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? { @@ -105,7 +105,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> "Generate `IndexMut` impl from this `Index` trait", target, |edit| { - edit.insert(target.start(), format!("$0{}\n\n", impl_def.to_string())); + edit.insert(target.start(), format!("$0{}\n\n", impl_def)); }, ) } diff --git a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index 0f67380d12..315b6487b5 100644 --- a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -128,7 +128,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ builder.replace_snippet( snippet_cap, impl_name.syntax().text_range(), - format!("${{0:TraitName}}{} for {}", arg_list, impl_name.to_string()), + format!("${{0:TraitName}}{} for {}", arg_list, impl_name), ); // Insert trait before TraitImpl @@ -144,17 +144,13 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ } else { builder.replace( impl_name.syntax().text_range(), - format!("NewTrait{} for {}", arg_list, impl_name.to_string()), + format!("NewTrait{} for {}", arg_list, impl_name), ); // Insert trait before TraitImpl builder.insert( impl_ast.syntax().text_range().start(), - format!( - "{}\n\n{}", - trait_ast.to_string(), - IndentLevel::from_node(impl_ast.syntax()) - ), + format!("{}\n\n{}", trait_ast, IndentLevel::from_node(impl_ast.syntax())), ); } diff --git a/crates/ide-assists/src/handlers/qualify_path.rs b/crates/ide-assists/src/handlers/qualify_path.rs index fde46db305..0864871849 100644 --- a/crates/ide-assists/src/handlers/qualify_path.rs +++ b/crates/ide-assists/src/handlers/qualify_path.rs @@ -37,11 +37,9 @@ use crate::{ // ``` pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; - let mut proposed_imports = import_assets.search_for_relative_paths( - &ctx.sema, - ctx.config.prefer_no_std, - ctx.config.prefer_prelude, - ); + let mut proposed_imports: Vec<_> = import_assets + .search_for_relative_paths(&ctx.sema, ctx.config.prefer_no_std, ctx.config.prefer_prelude) + .collect(); if proposed_imports.is_empty() { return None; } @@ -82,6 +80,7 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option }; // we aren't interested in different namespaces + proposed_imports.sort_by(|a, b| a.import_path.cmp(&b.import_path)); proposed_imports.dedup_by(|a, b| a.import_path == b.import_path); let group_label = group_label(candidate); diff --git a/crates/ide-assists/src/handlers/remove_parentheses.rs b/crates/ide-assists/src/handlers/remove_parentheses.rs index 0281b29cd4..99c55e9ff7 100644 --- a/crates/ide-assists/src/handlers/remove_parentheses.rs +++ b/crates/ide-assists/src/handlers/remove_parentheses.rs @@ -43,7 +43,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) -> let prev_token = parens.syntax().first_token().and_then(|it| it.prev_token()); let need_to_add_ws = match prev_token { Some(it) => { - let 
tokens = vec![T![&], T![!], T!['('], T!['['], T!['{']]; + let tokens = [T![&], T![!], T!['('], T!['['], T!['{']]; it.kind() != SyntaxKind::WHITESPACE && !tokens.contains(&it.kind()) } None => false, diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index b54e4204e3..788cc846c2 100644 --- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -74,7 +74,6 @@ pub(crate) fn replace_derive_with_manual_impl( current_crate, NameToImport::exact_case_sensitive(path.segments().last()?.to_string()), items_locator::AssocSearchMode::Exclude, - Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()), ) .filter_map(|item| match item.as_module_def()? { ModuleDef::Trait(trait_) => Some(trait_), diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs index d74d3b264a..446f0be834 100644 --- a/crates/ide-completion/src/completions/flyimport.rs +++ b/crates/ide-completion/src/completions/flyimport.rs @@ -263,7 +263,6 @@ fn import_on_the_fly( ctx.config.prefer_no_std, ctx.config.prefer_prelude, ) - .into_iter() .filter(ns_filter) .filter(|import| { let original_item = &import.original_item; @@ -271,8 +270,14 @@ fn import_on_the_fly( && !ctx.is_item_hidden(original_item) && ctx.check_stability(original_item.attrs(ctx.db).as_deref()) }) - .sorted_by_key(|located_import| { - compute_fuzzy_completion_order_key(&located_import.import_path, &user_input_lowercased) + .sorted_by(|a, b| { + let key = |import_path| { + ( + compute_fuzzy_completion_order_key(import_path, &user_input_lowercased), + import_path, + ) + }; + key(&a.import_path).cmp(&key(&b.import_path)) }) .filter_map(|import| { render_resolution_with_import(RenderContext::new(ctx), path_ctx, import) @@ -310,7 +315,6 @@ fn import_on_the_fly_pat_( ctx.config.prefer_no_std, ctx.config.prefer_prelude, ) - .into_iter() .filter(ns_filter) .filter(|import| { let original_item = &import.original_item; @@ -318,8 +322,14 @@ fn import_on_the_fly_pat_( && !ctx.is_item_hidden(original_item) && ctx.check_stability(original_item.attrs(ctx.db).as_deref()) }) - .sorted_by_key(|located_import| { - compute_fuzzy_completion_order_key(&located_import.import_path, &user_input_lowercased) + .sorted_by(|a, b| { + let key = |import_path| { + ( + compute_fuzzy_completion_order_key(import_path, &user_input_lowercased), + import_path, + ) + }; + key(&a.import_path).cmp(&key(&b.import_path)) }) .filter_map(|import| { render_resolution_with_import_pat(RenderContext::new(ctx), pattern_ctx, import) @@ -352,13 +362,18 @@ fn import_on_the_fly_method( ctx.config.prefer_no_std, ctx.config.prefer_prelude, ) - .into_iter() .filter(|import| { !ctx.is_item_hidden(&import.item_to_import) && !ctx.is_item_hidden(&import.original_item) }) - .sorted_by_key(|located_import| { - compute_fuzzy_completion_order_key(&located_import.import_path, &user_input_lowercased) + .sorted_by(|a, b| { + let key = |import_path| { + ( + compute_fuzzy_completion_order_key(import_path, &user_input_lowercased), + import_path, + ) + }; + key(&a.import_path).cmp(&key(&b.import_path)) }) .for_each(|import| match import.original_item { ItemInNs::Values(hir::ModuleDef::Function(f)) => { @@ -407,7 +422,8 @@ fn compute_fuzzy_completion_order_key( ) -> usize { cov_mark::hit!(certain_fuzzy_order_test); let import_name = match proposed_mod_path.segments().last() { - Some(name) => 
name.to_smol_str().to_lowercase(), + // FIXME: nasty alloc, this is a hot path! + Some(name) => name.to_smol_str().to_ascii_lowercase(), None => return usize::MAX, }; match import_name.match_indices(user_input_lowercased).next() { diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index ff324e7a56..6a98e109f6 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -256,7 +256,6 @@ pub fn resolve_completion_edits( current_crate, NameToImport::exact_case_sensitive(imported_name), items_locator::AssocSearchMode::Include, - Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()), ); let import = items_with_name .filter_map(|candidate| { diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs index c58374f2e8..1af16ef857 100644 --- a/crates/ide-completion/src/tests/flyimport.rs +++ b/crates/ide-completion/src/tests/flyimport.rs @@ -599,6 +599,7 @@ fn main() { expect![[r#" fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED + me random_method(…) (use dep::test_mod::TestTrait) fn(&self) DEPRECATED "#]], ); } diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index ded5d4e3db..8f55f30a2d 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -6,18 +6,22 @@ // FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). use arrayvec::ArrayVec; +use either::Either; use hir::{ - Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, DocLinkDef, - ExternCrateDecl, Field, Function, GenericParam, HasVisibility, Impl, Label, Local, Macro, - Module, ModuleDef, Name, PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias, - TypeAlias, Variant, Visibility, + Adt, AsAssocItem, AssocItem, AttributeTemplate, BuiltinAttr, BuiltinType, Const, Crate, + DefWithBody, DeriveHelper, DocLinkDef, ExternCrateDecl, Field, Function, GenericParam, + HasVisibility, HirDisplay, Impl, Label, Local, Macro, Module, ModuleDef, Name, PathResolution, + Semantics, Static, ToolModule, Trait, TraitAlias, TupleField, TypeAlias, Variant, VariantDef, + Visibility, }; -use stdx::impl_from; +use stdx::{format_to, impl_from}; use syntax::{ ast::{self, AstNode}, match_ast, SyntaxKind, SyntaxNode, SyntaxToken, }; +use crate::documentation::{Documentation, HasDocs}; +use crate::famous_defs::FamousDefs; use crate::RootDatabase; // FIXME: a more precise name would probably be `Symbol`? 
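Aside on the flyimport ordering change above: the sort key is the position of the first occurrence of the lowercased user input in the candidate's last path segment, and ties are now broken by the import path itself so the result order is deterministic. A std-only sketch with plain strings in place of `ModPath`:

// Lower key = user input matches earlier in the name = ranked higher.
fn fuzzy_order_key(import_path: &str, user_input_lowercased: &str) -> usize {
    let name = import_path.rsplit("::").next().unwrap_or(import_path).to_ascii_lowercase();
    match name.find(user_input_lowercased) {
        Some(index) => index,
        None => usize::MAX,
    }
}

fn main() {
    let user_input = "err";
    let mut candidates = vec!["std::io::Error", "core::fmt::Error", "my_crate::NotAnError"];
    // Sort by (match position, path) so equally-good matches keep a stable order.
    candidates.sort_by(|a, b| {
        let key = |p: &&str| (fuzzy_order_key(p, user_input), *p);
        key(a).cmp(&key(b))
    });
    println!("{candidates:?}");
    // ["core::fmt::Error", "std::io::Error", "my_crate::NotAnError"]
}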
@@ -25,6 +29,7 @@ use crate::RootDatabase; pub enum Definition { Macro(Macro), Field(Field), + TupleField(TupleField), Module(Module), Function(Function), Adt(Adt), @@ -76,13 +81,21 @@ impl Definition { Definition::Label(it) => it.module(db), Definition::ExternCrateDecl(it) => it.module(db), Definition::DeriveHelper(it) => it.derive().module(db), - Definition::BuiltinAttr(_) | Definition::BuiltinType(_) | Definition::ToolModule(_) => { - return None - } + Definition::BuiltinAttr(_) + | Definition::BuiltinType(_) + | Definition::TupleField(_) + | Definition::ToolModule(_) => return None, }; Some(module) } + pub fn enclosing_definition(&self, db: &RootDatabase) -> Option { + match self { + Definition::Local(it) => it.parent(db).try_into().ok(), + _ => None, + } + } + pub fn visibility(&self, db: &RootDatabase) -> Option { let vis = match self { Definition::Field(sf) => sf.visibility(db), @@ -96,7 +109,7 @@ impl Definition { Definition::TypeAlias(it) => it.visibility(db), Definition::Variant(it) => it.visibility(db), Definition::ExternCrateDecl(it) => it.visibility(db), - Definition::BuiltinType(_) => Visibility::Public, + Definition::BuiltinType(_) | Definition::TupleField(_) => Visibility::Public, Definition::Macro(_) => return None, Definition::BuiltinAttr(_) | Definition::ToolModule(_) @@ -123,6 +136,7 @@ impl Definition { Definition::TraitAlias(it) => it.name(db), Definition::TypeAlias(it) => it.name(db), Definition::BuiltinType(it) => it.name(), + Definition::TupleField(it) => it.name(), Definition::SelfType(_) => return None, Definition::Local(it) => it.name(db), Definition::GenericParam(it) => it.name(db), @@ -134,6 +148,127 @@ impl Definition { }; Some(name) } + + pub fn docs( + &self, + db: &RootDatabase, + famous_defs: Option<&FamousDefs<'_, '_>>, + ) -> Option { + let docs = match self { + Definition::Macro(it) => it.docs(db), + Definition::Field(it) => it.docs(db), + Definition::Module(it) => it.docs(db), + Definition::Function(it) => it.docs(db), + Definition::Adt(it) => it.docs(db), + Definition::Variant(it) => it.docs(db), + Definition::Const(it) => it.docs(db), + Definition::Static(it) => it.docs(db), + Definition::Trait(it) => it.docs(db), + Definition::TraitAlias(it) => it.docs(db), + Definition::TypeAlias(it) => it.docs(db), + Definition::BuiltinType(it) => { + famous_defs.and_then(|fd| { + // std exposes prim_{} modules with docstrings on the root to document the builtins + let primitive_mod = format!("prim_{}", it.name().display(fd.0.db)); + let doc_owner = find_std_module(fd, &primitive_mod)?; + doc_owner.docs(fd.0.db) + }) + } + Definition::Local(_) => None, + Definition::SelfType(impl_def) => { + impl_def.self_ty(db).as_adt().map(|adt| adt.docs(db))? 
+ } + Definition::GenericParam(_) => None, + Definition::Label(_) => None, + Definition::ExternCrateDecl(it) => it.docs(db), + + Definition::BuiltinAttr(it) => { + let name = it.name(db); + let AttributeTemplate { word, list, name_value_str } = it.template(db)?; + let mut docs = "Valid forms are:".to_owned(); + if word { + format_to!(docs, "\n - #\\[{}]", name); + } + if let Some(list) = list { + format_to!(docs, "\n - #\\[{}({})]", name, list); + } + if let Some(name_value_str) = name_value_str { + format_to!(docs, "\n - #\\[{} = {}]", name, name_value_str); + } + Some(Documentation::new(docs.replace('*', "\\*"))) + } + Definition::ToolModule(_) => None, + Definition::DeriveHelper(_) => None, + Definition::TupleField(_) => None, + }; + + docs.or_else(|| { + // docs are missing, for assoc items of trait impls try to fall back to the docs of the + // original item of the trait + let assoc = self.as_assoc_item(db)?; + let trait_ = assoc.containing_trait_impl(db)?; + let name = Some(assoc.name(db)?); + let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; + item.docs(db) + }) + } + + pub fn label(&self, db: &RootDatabase) -> Option { + let label = match *self { + Definition::Macro(it) => it.display(db).to_string(), + Definition::Field(it) => it.display(db).to_string(), + Definition::TupleField(it) => it.display(db).to_string(), + Definition::Module(it) => it.display(db).to_string(), + Definition::Function(it) => it.display(db).to_string(), + Definition::Adt(it) => it.display(db).to_string(), + Definition::Variant(it) => it.display(db).to_string(), + Definition::Const(it) => it.display(db).to_string(), + Definition::Static(it) => it.display(db).to_string(), + Definition::Trait(it) => it.display(db).to_string(), + Definition::TraitAlias(it) => it.display(db).to_string(), + Definition::TypeAlias(it) => it.display(db).to_string(), + Definition::BuiltinType(it) => it.name().display(db).to_string(), + Definition::Local(it) => { + let ty = it.ty(db); + let ty = ty.display_truncated(db, None); + let is_mut = if it.is_mut(db) { "mut " } else { "" }; + let desc = match it.primary_source(db).into_ident_pat() { + Some(ident) => { + let name = it.name(db); + let let_kw = if ident.syntax().parent().map_or(false, |p| { + p.kind() == SyntaxKind::LET_STMT || p.kind() == SyntaxKind::LET_EXPR + }) { + "let " + } else { + "" + }; + format!("{let_kw}{is_mut}{}: {ty}", name.display(db)) + } + None => format!("{is_mut}self: {ty}"), + }; + desc + } + Definition::SelfType(impl_def) => { + impl_def.self_ty(db).as_adt().and_then(|adt| Definition::Adt(adt).label(db))? 
+ } + Definition::GenericParam(it) => it.display(db).to_string(), + Definition::Label(it) => it.name(db).display(db).to_string(), + Definition::ExternCrateDecl(it) => it.display(db).to_string(), + Definition::BuiltinAttr(it) => format!("#[{}]", it.name(db)), + Definition::ToolModule(it) => it.name(db).to_string(), + Definition::DeriveHelper(it) => format!("derive_helper {}", it.name(db).display(db)), + }; + Some(label) + } +} + +fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option { + let db = famous_defs.0.db; + let std_crate = famous_defs.std()?; + let std_root_module = std_crate.root_module(); + std_root_module.children(db).find(|module| { + module.name(db).map_or(false, |module| module.display(db).to_string() == name) + }) } // FIXME: IdentClass as a name no longer fits @@ -502,9 +637,11 @@ impl NameRefClass { ast::FieldExpr(field_expr) => { sema.resolve_field_fallback(&field_expr) .map(|it| { - it.map_left(Definition::Field) - .map_right(Definition::Function) - .either(NameRefClass::Definition, NameRefClass::Definition) + NameRefClass::Definition(match it { + Either::Left(Either::Left(field)) => Definition::Field(field), + Either::Left(Either::Right(field)) => Definition::TupleField(field), + Either::Right(fun) => Definition::Function(fun), + }) }) }, ast::RecordPatField(record_pat_field) => { @@ -662,3 +799,22 @@ impl From for Definition { } } } + +impl From for Definition { + fn from(def: VariantDef) -> Self { + ModuleDef::from(def).into() + } +} + +impl TryFrom for Definition { + type Error = (); + fn try_from(def: DefWithBody) -> Result { + match def { + DefWithBody::Function(it) => Ok(it.into()), + DefWithBody::Static(it) => Ok(it.into()), + DefWithBody::Const(it) => Ok(it.into()), + DefWithBody::Variant(it) => Ok(it.into()), + DefWithBody::InTypeConst(_) => Err(()), + } + } +} diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs index a4f0a6df78..652968d808 100644 --- a/crates/ide-db/src/imports/import_assets.rs +++ b/crates/ide-db/src/imports/import_assets.rs @@ -207,7 +207,7 @@ impl ImportAssets { prefix_kind: PrefixKind, prefer_no_std: bool, prefer_prelude: bool, - ) -> Vec { + ) -> impl Iterator { let _p = profile::span("import_assets::search_for_imports"); self.search_for(sema, Some(prefix_kind), prefer_no_std, prefer_prelude) } @@ -218,7 +218,7 @@ impl ImportAssets { sema: &Semantics<'_, RootDatabase>, prefer_no_std: bool, prefer_prelude: bool, - ) -> Vec { + ) -> impl Iterator { let _p = profile::span("import_assets::search_for_relative_paths"); self.search_for(sema, None, prefer_no_std, prefer_prelude) } @@ -259,9 +259,15 @@ impl ImportAssets { prefixed: Option, prefer_no_std: bool, prefer_prelude: bool, - ) -> Vec { + ) -> impl Iterator { let _p = profile::span("import_assets::search_for"); + let scope = match sema.scope(&self.candidate_node) { + Some(it) => it, + None => return >::default().into_iter(), + }; + + let krate = self.module_with_candidate.krate(); let scope_definitions = self.scope_definitions(sema); let mod_path = |item| { get_mod_path( @@ -272,30 +278,30 @@ impl ImportAssets { prefer_no_std, prefer_prelude, ) - }; - - let krate = self.module_with_candidate.krate(); - let scope = match sema.scope(&self.candidate_node) { - Some(it) => it, - None => return Vec::new(), + .filter(|path| path.len() > 1) }; match &self.import_candidate { ImportCandidate::Path(path_candidate) => { - path_applicable_imports(sema, krate, path_candidate, mod_path) - } - 
ImportCandidate::TraitAssocItem(trait_candidate) => { - trait_applicable_items(sema, krate, &scope, trait_candidate, true, mod_path) - } - ImportCandidate::TraitMethod(trait_candidate) => { - trait_applicable_items(sema, krate, &scope, trait_candidate, false, mod_path) + path_applicable_imports(sema, krate, path_candidate, mod_path, |item_to_import| { + !scope_definitions.contains(&ScopeDef::from(item_to_import)) + }) } + ImportCandidate::TraitAssocItem(trait_candidate) + | ImportCandidate::TraitMethod(trait_candidate) => trait_applicable_items( + sema, + krate, + &scope, + trait_candidate, + matches!(self.import_candidate, ImportCandidate::TraitAssocItem(_)), + mod_path, + |trait_to_import| { + !scope_definitions + .contains(&ScopeDef::ModuleDef(ModuleDef::Trait(trait_to_import))) + }, + ), } .into_iter() - .filter(|import| import.import_path.len() > 1) - .filter(|import| !scope_definitions.contains(&ScopeDef::from(import.item_to_import))) - .sorted_by(|a, b| a.import_path.cmp(&b.import_path)) - .collect() } fn scope_definitions(&self, sema: &Semantics<'_, RootDatabase>) -> FxHashSet { @@ -315,6 +321,7 @@ fn path_applicable_imports( current_crate: Crate, path_candidate: &PathImportCandidate, mod_path: impl Fn(ItemInNs) -> Option + Copy, + scope_filter: impl Fn(ItemInNs) -> bool + Copy, ) -> FxHashSet { let _p = profile::span("import_assets::path_applicable_imports"); @@ -333,12 +340,15 @@ fn path_applicable_imports( // // see also an ignored test under FIXME comment in the qualify_path.rs module AssocSearchMode::Exclude, - Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()), ) .filter_map(|item| { + if !scope_filter(item) { + return None; + } let mod_path = mod_path(item)?; Some(LocatedImport::new(mod_path, item, item)) }) + .take(DEFAULT_QUERY_SEARCH_LIMIT.inner()) .collect() } Some(qualifier) => items_locator::items_with_name( @@ -346,9 +356,9 @@ fn path_applicable_imports( current_crate, path_candidate.name.clone(), AssocSearchMode::Include, - Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()), ) - .filter_map(|item| import_for_item(sema.db, mod_path, &qualifier, item)) + .filter_map(|item| import_for_item(sema.db, mod_path, &qualifier, item, scope_filter)) + .take(DEFAULT_QUERY_SEARCH_LIMIT.inner()) .collect(), } } @@ -358,6 +368,7 @@ fn import_for_item( mod_path: impl Fn(ItemInNs) -> Option, unresolved_qualifier: &[SmolStr], original_item: ItemInNs, + scope_filter: impl Fn(ItemInNs) -> bool, ) -> Option { let _p = profile::span("import_assets::import_for_item"); let [first_segment, ..] = unresolved_qualifier else { return None }; @@ -413,15 +424,16 @@ fn import_for_item( // especially in case of lazy completion edit resolutions. 
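Aside on moving the search limit out of the query: applying the scope filter first and only then capping with `.take(..)` means the limit is spent on imports that are actually usable rather than on hits that get filtered away. A std-only sketch of the difference, with a made-up limit of 2 and a toy `in_scope` check:

fn main() {
    let limit = 2;
    let candidates = ["already_in_scope", "filtered_out", "good_a", "good_b"];
    let in_scope = |name: &str| matches!(name, "already_in_scope" | "filtered_out");

    // Old shape: cap first, filter later -- usable results can be starved out.
    let old: Vec<_> =
        candidates.into_iter().take(limit).filter(|&name| !in_scope(name)).collect();
    // New shape: filter first, then cap -- the limit only counts applicable imports.
    let new: Vec<_> =
        candidates.into_iter().filter(|&name| !in_scope(name)).take(limit).collect();

    println!("{old:?}"); // []
    println!("{new:?}"); // ["good_a", "good_b"]
}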
return None; } - (false, Some(trait_to_import)) => { + (false, Some(trait_to_import)) if scope_filter(trait_to_import) => { LocatedImport::new(mod_path(trait_to_import)?, trait_to_import, original_item) } - (true, None) => { + (true, None) if scope_filter(original_item_candidate) => { LocatedImport::new(import_path_candidate, original_item_candidate, original_item) } - (false, None) => { + (false, None) if scope_filter(segment_import) => { LocatedImport::new(mod_path(segment_import)?, segment_import, original_item) } + _ => return None, }) } @@ -490,6 +502,7 @@ fn trait_applicable_items( trait_candidate: &TraitImportCandidate, trait_assoc_item: bool, mod_path: impl Fn(ItemInNs) -> Option, + scope_filter: impl Fn(hir::Trait) -> bool, ) -> FxHashSet { let _p = profile::span("import_assets::trait_applicable_items"); @@ -500,22 +513,24 @@ fn trait_applicable_items( let related_traits = inherent_traits.chain(env_traits).collect::>(); let mut required_assoc_items = FxHashSet::default(); - let trait_candidates = items_locator::items_with_name( + let trait_candidates: FxHashSet<_> = items_locator::items_with_name( sema, current_crate, trait_candidate.assoc_item_name.clone(), AssocSearchMode::AssocItemsOnly, - Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()), ) .filter_map(|input| item_as_assoc(db, input)) .filter_map(|assoc| { + if !trait_assoc_item && matches!(assoc, AssocItem::Const(_) | AssocItem::TypeAlias(_)) { + return None; + } + let assoc_item_trait = assoc.containing_trait(db)?; if related_traits.contains(&assoc_item_trait) { - None - } else { - required_assoc_items.insert(assoc); - Some(assoc_item_trait.into()) + return None; } + required_assoc_items.insert(assoc); + Some(assoc_item_trait.into()) }) .collect(); @@ -531,12 +546,8 @@ fn trait_applicable_items( None, |assoc| { if required_assoc_items.contains(&assoc) { - if let AssocItem::Function(f) = assoc { - if f.self_param(db).is_some() { - return None; - } - } - let located_trait = assoc.containing_trait(db)?; + let located_trait = + assoc.containing_trait(db).filter(|&it| scope_filter(it))?; let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); let import_path = trait_import_paths .entry(trait_item) @@ -561,7 +572,8 @@ fn trait_applicable_items( |function| { let assoc = function.as_assoc_item(db)?; if required_assoc_items.contains(&assoc) { - let located_trait = assoc.containing_trait(db)?; + let located_trait = + assoc.containing_trait(db).filter(|&it| scope_filter(it))?; let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); let import_path = trait_import_paths .entry(trait_item) diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs index 4a5d234f73..432f1d745d 100644 --- a/crates/ide-db/src/items_locator.rs +++ b/crates/ide-db/src/items_locator.rs @@ -19,26 +19,24 @@ pub fn items_with_name<'a>( krate: Crate, name: NameToImport, assoc_item_search: AssocSearchMode, - limit: Option, ) -> impl Iterator + 'a { let _p = profile::span("items_with_name").detail(|| { format!( - "Name: {}, crate: {:?}, assoc items: {:?}, limit: {:?}", + "Name: {}, crate: {:?}, assoc items: {:?}", name.text(), assoc_item_search, krate.display_name(sema.db).map(|name| name.to_string()), - limit, ) }); let prefix = matches!(name, NameToImport::Prefix(..)); - let (mut local_query, mut external_query) = match name { + let (local_query, external_query) = match name { NameToImport::Prefix(exact_name, case_sensitive) | NameToImport::Exact(exact_name, case_sensitive) => { let mut local_query = 
symbol_index::Query::new(exact_name.clone()); + local_query.assoc_search_mode(assoc_item_search); let mut external_query = - // import_map::Query::new(exact_name).assoc_search_mode(assoc_item_search); - import_map::Query::new(exact_name); + import_map::Query::new(exact_name).assoc_search_mode(assoc_item_search); if prefix { local_query.prefix(); external_query = external_query.prefix(); @@ -55,8 +53,9 @@ pub fn items_with_name<'a>( NameToImport::Fuzzy(fuzzy_search_string, case_sensitive) => { let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone()); local_query.fuzzy(); + local_query.assoc_search_mode(assoc_item_search); - let mut external_query = import_map::Query::new(fuzzy_search_string.clone()) + let mut external_query = import_map::Query::new(fuzzy_search_string) .fuzzy() .assoc_search_mode(assoc_item_search); @@ -69,18 +68,12 @@ pub fn items_with_name<'a>( } }; - if let Some(limit) = limit { - external_query = external_query.limit(limit); - local_query.limit(limit); - } - - find_items(sema, krate, assoc_item_search, local_query, external_query) + find_items(sema, krate, local_query, external_query) } fn find_items<'a>( sema: &'a Semantics<'_, RootDatabase>, krate: Crate, - assoc_item_search: AssocSearchMode, local_query: symbol_index::Query, external_query: import_map::Query, ) -> impl Iterator + 'a { @@ -98,18 +91,12 @@ fn find_items<'a>( }); // Query the local crate using the symbol index. - let local_results = local_query - .search(&symbol_index::crate_symbols(db, krate)) - .into_iter() - .filter(move |candidate| match assoc_item_search { - AssocSearchMode::Include => true, - AssocSearchMode::Exclude => !candidate.is_assoc, - AssocSearchMode::AssocItemsOnly => candidate.is_assoc, - }) - .map(|local_candidate| match local_candidate.def { + let mut local_results = Vec::new(); + local_query.search(&symbol_index::crate_symbols(db, krate), |local_candidate| { + local_results.push(match local_candidate.def { hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def), def => ItemInNs::from(def), - }); - - external_importables.chain(local_results) + }) + }); + local_results.into_iter().chain(external_importables) } diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs index 8c1a6e6e40..edfeddc1bc 100644 --- a/crates/ide-db/src/path_transform.rs +++ b/crates/ide-db/src/path_transform.rs @@ -159,7 +159,7 @@ impl<'a> PathTransform<'a> { .for_each(|(k, v)| match (k.split(db), v) { (Either::Right(k), Some(TypeOrConst::Either(v))) => { if let Some(ty) = v.ty() { - type_substs.insert(k, ty.clone()); + type_substs.insert(k, ty); } } (Either::Right(k), None) => { diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 7f28965885..f694f7160d 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -198,6 +198,7 @@ impl Definition { Definition::SelfType(_) => return None, Definition::BuiltinAttr(_) => return None, Definition::ToolModule(_) => return None, + Definition::TupleField(_) => return None, // FIXME: This should be doable in theory Definition::DeriveHelper(_) => return None, }; diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index dbef360268..a40dd2692c 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -539,7 +539,7 @@ impl<'a> FindUsages<'a> { tree.token_at_offset(offset).into_iter().for_each(|token| { let Some(str_token) = ast::String::cast(token.clone()) else { return }; if let Some((range, nameres)) = - 
sema.check_for_format_args_template(token.clone(), offset) + sema.check_for_format_args_template(token, offset) { if self.found_format_args_ref(file_id, range, str_token, nameres, sink) { return; diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs index c7188f1f79..73be6a4071 100644 --- a/crates/ide-db/src/source_change.rs +++ b/crates/ide-db/src/source_change.rs @@ -341,13 +341,13 @@ impl SourceChangeBuilder { /// Adds a tabstop snippet to place the cursor before `token` pub fn add_tabstop_before_token(&mut self, _cap: SnippetCap, token: SyntaxToken) { assert!(token.parent().is_some()); - self.add_snippet(PlaceSnippet::Before(token.clone().into())); + self.add_snippet(PlaceSnippet::Before(token.into())); } /// Adds a tabstop snippet to place the cursor after `token` pub fn add_tabstop_after_token(&mut self, _cap: SnippetCap, token: SyntaxToken) { assert!(token.parent().is_some()); - self.add_snippet(PlaceSnippet::After(token.clone().into())); + self.add_snippet(PlaceSnippet::After(token.into())); } /// Adds a snippet to move the cursor selected over `node` diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs index f5f0f0576f..c2e95ca860 100644 --- a/crates/ide-db/src/symbol_index.rs +++ b/crates/ide-db/src/symbol_index.rs @@ -31,9 +31,10 @@ use base_db::{ salsa::{self, ParallelDatabase}, SourceDatabaseExt, SourceRootId, Upcast, }; -use fst::{self, Streamer}; +use fst::{self, raw::IndexedValue, Automaton, Streamer}; use hir::{ db::HirDatabase, + import_map::{AssocSearchMode, SearchMode}, symbols::{FileSymbol, SymbolCollector}, Crate, Module, }; @@ -43,22 +44,15 @@ use triomphe::Arc; use crate::RootDatabase; -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -enum SearchMode { - Fuzzy, - Exact, - Prefix, -} - #[derive(Debug, Clone)] pub struct Query { query: String, lowercased: String, + mode: SearchMode, + assoc_mode: AssocSearchMode, + case_sensitive: bool, only_types: bool, libs: bool, - mode: SearchMode, - case_sensitive: bool, - limit: usize, } impl Query { @@ -70,8 +64,8 @@ impl Query { only_types: false, libs: false, mode: SearchMode::Fuzzy, + assoc_mode: AssocSearchMode::Include, case_sensitive: false, - limit: usize::max_value(), } } @@ -95,12 +89,13 @@ impl Query { self.mode = SearchMode::Prefix; } - pub fn case_sensitive(&mut self) { - self.case_sensitive = true; + /// Specifies whether we want to include associated items in the result. 
+ pub fn assoc_search_mode(&mut self, assoc_mode: AssocSearchMode) { + self.assoc_mode = assoc_mode; } - pub fn limit(&mut self, limit: usize) { - self.limit = limit + pub fn case_sensitive(&mut self) { + self.case_sensitive = true; } } @@ -225,7 +220,9 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec { indices.iter().flat_map(|indices| indices.iter().cloned()).collect() }; - query.search(&indices) + let mut res = vec![]; + query.search(&indices, |f| res.push(f.clone())); + res } #[derive(Default)] @@ -285,6 +282,7 @@ impl SymbolIndex { builder.insert(key, value).unwrap(); } + // FIXME: fst::Map should ideally have a way to shrink the backing buffer without the unwrap dance let map = fst::Map::new({ let mut buf = builder.into_inner().unwrap(); buf.shrink_to_fit(); @@ -317,22 +315,54 @@ impl SymbolIndex { } impl Query { - pub(crate) fn search(self, indices: &[Arc]) -> Vec { + pub(crate) fn search<'sym>( + self, + indices: &'sym [Arc], + cb: impl FnMut(&'sym FileSymbol), + ) { let _p = profile::span("symbol_index::Query::search"); let mut op = fst::map::OpBuilder::new(); - for file_symbols in indices.iter() { - let automaton = fst::automaton::Subsequence::new(&self.lowercased); - op = op.add(file_symbols.map.search(automaton)) + match self.mode { + SearchMode::Exact => { + let automaton = fst::automaton::Str::new(&self.lowercased); + + for index in indices.iter() { + op = op.add(index.map.search(&automaton)); + } + self.search_maps(&indices, op.union(), cb) + } + SearchMode::Fuzzy => { + let automaton = fst::automaton::Subsequence::new(&self.lowercased); + + for index in indices.iter() { + op = op.add(index.map.search(&automaton)); + } + self.search_maps(&indices, op.union(), cb) + } + SearchMode::Prefix => { + let automaton = fst::automaton::Str::new(&self.lowercased).starts_with(); + + for index in indices.iter() { + op = op.add(index.map.search(&automaton)); + } + self.search_maps(&indices, op.union(), cb) + } } - let mut stream = op.union(); - let mut res = Vec::new(); + } + + fn search_maps<'sym>( + &self, + indices: &'sym [Arc], + mut stream: fst::map::Union<'_>, + mut cb: impl FnMut(&'sym FileSymbol), + ) { while let Some((_, indexed_values)) = stream.next() { - for indexed_value in indexed_values { - let symbol_index = &indices[indexed_value.index]; - let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value); + for &IndexedValue { index, value } in indexed_values { + let symbol_index = &indices[index]; + let (start, end) = SymbolIndex::map_value_to_range(value); for symbol in &symbol_index.symbols[start..end] { - if self.only_types + let non_type_for_type_only_query = self.only_types && !matches!( symbol.def, hir::ModuleDef::Adt(..) @@ -340,38 +370,23 @@ impl Query { | hir::ModuleDef::BuiltinType(..) | hir::ModuleDef::TraitAlias(..) | hir::ModuleDef::Trait(..) 
- ) - { + ); + if non_type_for_type_only_query || !self.matches_assoc_mode(symbol.is_assoc) { continue; } - let skip = match self.mode { - SearchMode::Fuzzy => { - self.case_sensitive - && self.query.chars().any(|c| !symbol.name.contains(c)) - } - SearchMode::Exact => symbol.name != self.query, - SearchMode::Prefix if self.case_sensitive => { - !symbol.name.starts_with(&self.query) - } - SearchMode::Prefix => symbol - .name - .chars() - .zip(self.lowercased.chars()) - .all(|(n, q)| n.to_lowercase().next() == Some(q)), - }; - - if skip { - continue; - } - - res.push(symbol.clone()); - if res.len() >= self.limit { - return res; + if self.mode.check(&self.query, self.case_sensitive, &symbol.name) { + cb(symbol); } } } } - res + } + + fn matches_assoc_mode(&self, is_trait_assoc_item: bool) -> bool { + match (is_trait_assoc_item, self.assoc_mode) { + (true, AssocSearchMode::Exclude) | (false, AssocSearchMode::AssocItemsOnly) => false, + _ => true, + } } } diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs index c202264bb5..6ecfd55ea0 100644 --- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs +++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -1,5 +1,10 @@ -use hir::{Const, Function, HasSource, TypeAlias}; -use ide_db::base_db::FileRange; +use hir::{db::ExpandDatabase, Const, Function, HasSource, HirDisplay, TypeAlias}; +use ide_db::{ + assists::{Assist, AssistId, AssistKind}, + label::Label, + source_change::SourceChangeBuilder, +}; +use text_edit::TextRange; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; @@ -10,47 +15,195 @@ pub(crate) fn trait_impl_redundant_assoc_item( ctx: &DiagnosticsContext<'_>, d: &hir::TraitImplRedundantAssocItems, ) -> Diagnostic { - let name = d.assoc_item.0.clone(); - let assoc_item = d.assoc_item.1; let db = ctx.sema.db; + let name = d.assoc_item.0.clone(); + let redundant_assoc_item_name = name.display(db); + let assoc_item = d.assoc_item.1; let default_range = d.impl_.syntax_node_ptr().text_range(); let trait_name = d.trait_.name(db).to_smol_str(); - let (redundant_item_name, diagnostic_range) = match assoc_item { - hir::AssocItem::Function(id) => ( - format!("`fn {}`", name.display(db)), - Function::from(id) - .source(db) - .map(|it| it.syntax().value.text_range()) - .unwrap_or(default_range), - ), - hir::AssocItem::Const(id) => ( - format!("`const {}`", name.display(db)), - Const::from(id) - .source(db) - .map(|it| it.syntax().value.text_range()) - .unwrap_or(default_range), - ), - hir::AssocItem::TypeAlias(id) => ( - format!("`type {}`", name.display(db)), - TypeAlias::from(id) - .source(db) - .map(|it| it.syntax().value.text_range()) - .unwrap_or(default_range), - ), + let (redundant_item_name, diagnostic_range, redundant_item_def) = match assoc_item { + hir::AssocItem::Function(id) => { + let function = Function::from(id); + ( + format!("`fn {}`", redundant_assoc_item_name), + function + .source(db) + .map(|it| it.syntax().value.text_range()) + .unwrap_or(default_range), + format!("\n {};", function.display(db)), + ) + } + hir::AssocItem::Const(id) => { + let constant = Const::from(id); + ( + format!("`const {}`", redundant_assoc_item_name), + constant + .source(db) + .map(|it| it.syntax().value.text_range()) + .unwrap_or(default_range), + format!("\n {};", constant.display(db)), + ) + } + hir::AssocItem::TypeAlias(id) => { + let type_alias = TypeAlias::from(id); + ( + format!("`type 
{}`", redundant_assoc_item_name), + type_alias + .source(db) + .map(|it| it.syntax().value.text_range()) + .unwrap_or(default_range), + format!("\n type {};", type_alias.name(ctx.sema.db).to_smol_str()), + ) + } }; Diagnostic::new( DiagnosticCode::RustcHardError("E0407"), format!("{redundant_item_name} is not a member of trait `{trait_name}`"), - FileRange { file_id: d.file_id.file_id().unwrap(), range: diagnostic_range }, + hir::InFile::new(d.file_id, diagnostic_range).original_node_file_range_rooted(db), ) + .with_fixes(quickfix_for_redundant_assoc_item( + ctx, + d, + redundant_item_def, + diagnostic_range, + )) +} + +/// add assoc item into the trait def body +fn quickfix_for_redundant_assoc_item( + ctx: &DiagnosticsContext<'_>, + d: &hir::TraitImplRedundantAssocItems, + redundant_item_def: String, + range: TextRange, +) -> Option> { + let add_assoc_item_def = |builder: &mut SourceChangeBuilder| -> Option<()> { + let db = ctx.sema.db; + let root = db.parse_or_expand(d.file_id); + // don't modify trait def in outer crate + let current_crate = ctx.sema.scope(&d.impl_.syntax_node_ptr().to_node(&root))?.krate(); + let trait_def_crate = d.trait_.module(db).krate(); + if trait_def_crate != current_crate { + return None; + } + + let trait_def = d.trait_.source(db)?.value; + let l_curly = trait_def.assoc_item_list()?.l_curly_token()?.text_range(); + let where_to_insert = + hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted(db).range; + + Some(builder.insert(where_to_insert.end(), redundant_item_def)) + }; + let file_id = d.file_id.file_id()?; + let mut source_change_builder = SourceChangeBuilder::new(file_id); + add_assoc_item_def(&mut source_change_builder)?; + + Some(vec![Assist { + id: AssistId("add assoc item def into trait def", AssistKind::QuickFix), + label: Label::new("Add assoc item def into trait def".to_string()), + group: None, + target: range, + source_change: Some(source_change_builder.finish()), + trigger_signature_help: false, + }]) } #[cfg(test)] mod tests { - use crate::tests::check_diagnostics; + use crate::tests::{check_diagnostics, check_fix, check_no_fix}; + + #[test] + fn quickfix_for_assoc_func() { + check_fix( + r#" +trait Marker { + fn boo(); +} +struct Foo; +impl Marker for Foo { + fn$0 bar(_a: i32, _b: String) -> String {} + fn boo() {} +} + "#, + r#" +trait Marker { + fn bar(_a: i32, _b: String) -> String; + fn boo(); +} +struct Foo; +impl Marker for Foo { + fn bar(_a: i32, _b: String) -> String {} + fn boo() {} +} + "#, + ) + } + + #[test] + fn quickfix_for_assoc_const() { + check_fix( + r#" +trait Marker { + fn foo () {} +} +struct Foo; +impl Marker for Foo { + const FLAG: bool$0 = false; +} + "#, + r#" +trait Marker { + const FLAG: bool; + fn foo () {} +} +struct Foo; +impl Marker for Foo { + const FLAG: bool = false; +} + "#, + ) + } + + #[test] + fn quickfix_for_assoc_type() { + check_fix( + r#" +trait Marker { +} +struct Foo; +impl Marker for Foo { + type T = i32;$0 +} + "#, + r#" +trait Marker { + type T; +} +struct Foo; +impl Marker for Foo { + type T = i32; +} + "#, + ) + } + + #[test] + fn quickfix_dont_work() { + check_no_fix( + r#" + //- /dep.rs crate:dep + trait Marker { + } + //- /main.rs crate:main deps:dep + struct Foo; + impl dep::Marker for Foo { + type T = i32;$0 + } + "#, + ) + } #[test] fn trait_with_default_value() { @@ -64,12 +217,12 @@ trait Marker { struct Foo; impl Marker for Foo { type T = i32; - //^^^^^^^^^^^^^ error: `type T` is not a member of trait `Marker` + //^^^^^^^^^^^^^ 💡 error: `type T` is not a member of trait 
`Marker` const FLAG: bool = true; fn bar() {} - //^^^^^^^^^^^ error: `fn bar` is not a member of trait `Marker` + //^^^^^^^^^^^ 💡 error: `fn bar` is not a member of trait `Marker` fn boo() {} } diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 60a45a05a4..41fb672908 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -160,7 +160,7 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) - // if receiver should be pass as first arg in the assoc func, // we could omit generic parameters cause compiler can deduce it automatically if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() { - let generic_parameters = generic_parameters.join(", ").to_string(); + let generic_parameters = generic_parameters.join(", "); receiver_type_adt_name = format!("{}::<{}>", receiver_type_adt_name, generic_parameters); } diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs index e90d385bab..4d7d425bab 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -58,9 +58,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option Option { let mod_path = definition_mod_path(db, &def); - let (label, docs) = match def { - Definition::Macro(it) => label_and_docs(db, it), - Definition::Field(it) => label_and_layout_info_and_docs( - db, - it, - config, - |&it| it.layout(db), - |_| { - let var_def = it.parent_def(db); - match var_def { - hir::VariantDef::Struct(s) => { - Adt::from(s).layout(db).ok().and_then(|layout| layout.field_offset(it)) + let label = def.label(db)?; + let docs = def.docs(db, famous_defs); + + let value = match def { + Definition::Variant(it) => { + if !it.parent_enum(db).is_data_carrying(db) { + match it.eval(db) { + Ok(it) => { + Some(if it >= 10 { format!("{it} ({it:#X})") } else { format!("{it}") }) } - _ => None, + Err(_) => it.value(db).map(|it| format!("{it:?}")), } - }, - ), - Definition::Module(it) => label_and_docs(db, it), - Definition::Function(it) => label_and_docs(db, it), - Definition::Adt(it) => { - label_and_layout_info_and_docs(db, it, config, |&it| it.layout(db), |_| None) + } else { + None + } } - Definition::Variant(it) => label_value_and_layout_info_and_docs( - db, - it, - config, - |&it| { - if !it.parent_enum(db).is_data_carrying(db) { - match it.eval(db) { - Ok(it) => { - Some(if it >= 10 { format!("{it} ({it:#X})") } else { format!("{it}") }) - } - Err(_) => it.value(db).map(|it| format!("{it:?}")), - } - } else { - None - } - }, - |it| it.layout(db), - |layout| layout.enum_tag_size(), - ), - Definition::Const(it) => label_value_and_docs(db, it, |it| { + Definition::Const(it) => { let body = it.render_eval(db); match body { Ok(it) => Some(it), @@ -447,53 +417,59 @@ pub(super) fn definition( Some(body.to_string()) } } - }), - Definition::Static(it) => label_value_and_docs(db, it, |it| { + } + Definition::Static(it) => { let source = it.source(db)?; let mut body = source.value.body()?.syntax().clone(); if source.file_id.is_macro() { body = insert_whitespace_into_node::insert_ws_into(body); } Some(body.to_string()) - }), - Definition::Trait(it) => label_and_docs(db, it), - Definition::TraitAlias(it) => label_and_docs(db, it), - Definition::TypeAlias(it) => { - label_and_layout_info_and_docs(db, it, 
config, |&it| it.ty(db).layout(db), |_| None) - } - Definition::BuiltinType(it) => { - return famous_defs - .and_then(|fd| builtin(fd, it)) - .or_else(|| Some(Markup::fenced_block(&it.name().display(db)))) - } - Definition::Local(it) => return local(db, it, config), - Definition::SelfType(impl_def) => { - impl_def.self_ty(db).as_adt().map(|adt| label_and_docs(db, adt))? - } - Definition::GenericParam(it) => (it.display(db).to_string(), None), - Definition::Label(it) => return Some(Markup::fenced_block(&it.name(db).display(db))), - Definition::ExternCrateDecl(it) => label_and_docs(db, it), - // FIXME: We should be able to show more info about these - Definition::BuiltinAttr(it) => return render_builtin_attr(db, it), - Definition::ToolModule(it) => return Some(Markup::fenced_block(&it.name(db))), - Definition::DeriveHelper(it) => { - (format!("derive_helper {}", it.name(db).display(db)), None) } + _ => None, }; - let docs = docs - .filter(|_| config.documentation) - .or_else(|| { - // docs are missing, for assoc items of trait impls try to fall back to the docs of the - // original item of the trait - let assoc = def.as_assoc_item(db)?; - let trait_ = assoc.containing_trait_impl(db)?; - let name = Some(assoc.name(db)?); - let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; - item.docs(db) - }) - .map(Into::into); - markup(docs, label, mod_path) + let layout_info = match def { + Definition::Field(it) => render_memory_layout( + config.memory_layout, + || it.layout(db), + |_| { + let var_def = it.parent_def(db); + match var_def { + hir::VariantDef::Struct(s) => { + Adt::from(s).layout(db).ok().and_then(|layout| layout.field_offset(it)) + } + _ => None, + } + }, + |_| None, + ), + Definition::Adt(it) => { + render_memory_layout(config.memory_layout, || it.layout(db), |_| None, |_| None) + } + Definition::Variant(it) => render_memory_layout( + config.memory_layout, + || it.layout(db), + |_| None, + |layout| layout.enum_tag_size(), + ), + Definition::TypeAlias(it) => { + render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None) + } + Definition::Local(it) => { + render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None) + } + _ => None, + }; + + let label = match (value, layout_info) { + (Some(value), Some(layout_info)) => format!("{label} = {value}{layout_info}"), + (Some(value), None) => format!("{label} = {value}"), + (None, Some(layout_info)) => format!("{label}{layout_info}"), + (None, None) => label, + }; + + markup(docs.map(Into::into), label, mod_path) } fn type_info( @@ -595,114 +571,16 @@ fn closure_ty( Some(res) } -fn render_builtin_attr(db: &RootDatabase, attr: hir::BuiltinAttr) -> Option { - let name = attr.name(db); - let desc = format!("#[{name}]"); - - let AttributeTemplate { word, list, name_value_str } = match attr.template(db) { - Some(template) => template, - None => return Some(Markup::fenced_block(&attr.name(db))), - }; - let mut docs = "Valid forms are:".to_owned(); - if word { - format_to!(docs, "\n - #\\[{}]", name); - } - if let Some(list) = list { - format_to!(docs, "\n - #\\[{}({})]", name, list); - } - if let Some(name_value_str) = name_value_str { - format_to!(docs, "\n - #\\[{} = {}]", name, name_value_str); - } - markup(Some(docs.replace('*', "\\*")), desc, None) -} - -fn label_and_docs(db: &RootDatabase, def: D) -> (String, Option) -where - D: HasDocs + HirDisplay, -{ - let label = def.display(db).to_string(); - let docs = def.docs(db); - (label, docs) -} - -fn 
label_and_layout_info_and_docs( - db: &RootDatabase, - def: D, - config: &HoverConfig, - layout_extractor: E, - layout_offset_extractor: E2, -) -> (String, Option) -where - D: HasDocs + HirDisplay, - E: Fn(&D) -> Result, - E2: Fn(&Layout) -> Option, -{ - let mut label = def.display(db).to_string(); - if let Some(layout) = render_memory_layout( - config.memory_layout, - || layout_extractor(&def), - layout_offset_extractor, - |_| None, - ) { - format_to!(label, "{layout}"); - } - let docs = def.docs(db); - (label, docs) -} - -fn label_value_and_layout_info_and_docs( - db: &RootDatabase, - def: D, - config: &HoverConfig, - value_extractor: E, - layout_extractor: E2, - layout_tag_extractor: E3, -) -> (String, Option) -where - D: HasDocs + HirDisplay, - E: Fn(&D) -> Option, - E2: Fn(&D) -> Result, - E3: Fn(&Layout) -> Option, - V: Display, -{ - let value = value_extractor(&def); - let mut label = match value { - Some(value) => format!("{} = {value}", def.display(db)), - None => def.display(db).to_string(), - }; - if let Some(layout) = render_memory_layout( - config.memory_layout, - || layout_extractor(&def), - |_| None, - layout_tag_extractor, - ) { - format_to!(label, "{layout}"); - } - let docs = def.docs(db); - (label, docs) -} - -fn label_value_and_docs( - db: &RootDatabase, - def: D, - value_extractor: E, -) -> (String, Option) -where - D: HasDocs + HirDisplay, - E: Fn(&D) -> Option, - V: Display, -{ - let label = if let Some(value) = value_extractor(&def) { - format!("{} = {value}", def.display(db)) - } else { - def.display(db).to_string() - }; - let docs = def.docs(db); - (label, docs) -} - fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option { - if let Definition::GenericParam(_) = def { + if matches!( + def, + Definition::GenericParam(_) + | Definition::BuiltinType(_) + | Definition::Local(_) + | Definition::Label(_) + | Definition::BuiltinAttr(_) + | Definition::ToolModule(_) + ) { return None; } def.module(db).map(|module| path(db, module, definition_owner_name(db, def))) @@ -724,14 +602,6 @@ fn markup(docs: Option, desc: String, mod_path: Option) -> Optio Some(buf.into()) } -fn builtin(famous_defs: &FamousDefs<'_, '_>, builtin: hir::BuiltinType) -> Option { - // std exposes prim_{} modules with docstrings on the root to document the builtins - let primitive_mod = format!("prim_{}", builtin.name().display(famous_defs.0.db)); - let doc_owner = find_std_module(famous_defs, &primitive_mod)?; - let docs = doc_owner.docs(famous_defs.0.db)?; - markup(Some(docs.into()), builtin.name().display(famous_defs.0.db).to_string(), None) -} - fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option { let db = famous_defs.0.db; let std_crate = famous_defs.std()?; @@ -741,34 +611,6 @@ fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option Option { - let ty = it.ty(db); - let ty = ty.display_truncated(db, None); - let is_mut = if it.is_mut(db) { "mut " } else { "" }; - let mut desc = match it.primary_source(db).into_ident_pat() { - Some(ident) => { - let name = it.name(db); - let let_kw = if ident - .syntax() - .parent() - .map_or(false, |p| p.kind() == LET_STMT || p.kind() == LET_EXPR) - { - "let " - } else { - "" - }; - format!("{let_kw}{is_mut}{}: {ty}", name.display(db)) - } - None => format!("{is_mut}self: {ty}"), - }; - if let Some(layout) = - render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None) - { - format_to!(desc, "{layout}"); - } - markup(None, desc, None) -} - fn render_memory_layout( config: Option, 
layout: impl FnOnce() -> Result, diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index e82d730e4a..f466b8e938 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -32,6 +32,7 @@ mod fn_lifetime_fn; mod implicit_static; mod param_name; mod implicit_drop; +mod range_exclusive; #[derive(Clone, Debug, PartialEq, Eq)] pub struct InlayHintsConfig { @@ -51,6 +52,7 @@ pub struct InlayHintsConfig { pub param_names_for_lifetime_elision_hints: bool, pub hide_named_constructor_hints: bool, pub hide_closure_initialization_hints: bool, + pub range_exclusive_hints: bool, pub closure_style: ClosureStyle, pub max_length: Option, pub closing_brace_hints_min_lines: Option, @@ -127,6 +129,7 @@ pub enum InlayKind { Parameter, Type, Drop, + RangeExclusive, } #[derive(Debug)] @@ -517,13 +520,20 @@ fn hints( closure_captures::hints(hints, famous_defs, config, file_id, it.clone()); closure_ret::hints(hints, famous_defs, config, file_id, it) }, + ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, config, it), _ => None, } }, ast::Pat(it) => { binding_mode::hints(hints, sema, config, &it); - if let ast::Pat::IdentPat(it) = it { - bind_pat::hints(hints, famous_defs, config, file_id, &it); + match it { + ast::Pat::IdentPat(it) => { + bind_pat::hints(hints, famous_defs, config, file_id, &it); + } + ast::Pat::RangePat(it) => { + range_exclusive::hints(hints, config, it); + } + _ => {} } Some(()) }, @@ -593,7 +603,6 @@ mod tests { use hir::ClosureStyle; use itertools::Itertools; use test_utils::extract_annotations; - use text_edit::{TextRange, TextSize}; use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode}; use crate::DiscriminantHints; @@ -622,6 +631,7 @@ mod tests { closing_brace_hints_min_lines: None, fields_to_resolve: InlayFieldsToResolve::empty(), implicit_drop_hints: false, + range_exclusive_hints: false, }; pub(super) const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig { type_hints: true, @@ -654,29 +664,6 @@ mod tests { assert_eq!(expected, actual, "\nExpected:\n{expected:#?}\n\nActual:\n{actual:#?}"); } - #[track_caller] - pub(super) fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) { - let (analysis, file_id) = fixture::file(ra_fixture); - let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); - expect.assert_debug_eq(&inlay_hints) - } - - #[track_caller] - pub(super) fn check_expect_clear_loc( - config: InlayHintsConfig, - ra_fixture: &str, - expect: Expect, - ) { - let (analysis, file_id) = fixture::file(ra_fixture); - let mut inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); - inlay_hints.iter_mut().flat_map(|hint| &mut hint.label.parts).for_each(|hint| { - if let Some(loc) = &mut hint.linked_location { - loc.range = TextRange::empty(TextSize::from(0)); - } - }); - expect.assert_debug_eq(&inlay_hints) - } - /// Computes inlay hints for the fixture, applies all the provided text edits and then runs /// expect test. 
#[track_caller] diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index c9e9a22378..b6063978e9 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -75,12 +75,12 @@ pub(super) fn hints( #[cfg(test)] mod tests { - use expect_test::expect; + use expect_test::{expect, Expect}; + use text_edit::{TextRange, TextSize}; use crate::{ - inlay_hints::tests::{ - check_expect, check_expect_clear_loc, check_with_config, DISABLED_CONFIG, TEST_CONFIG, - }, + fixture, + inlay_hints::tests::{check_with_config, DISABLED_CONFIG, TEST_CONFIG}, InlayHintsConfig, }; @@ -89,6 +89,33 @@ mod tests { check_with_config(InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, ra_fixture); } + #[track_caller] + pub(super) fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) { + let (analysis, file_id) = fixture::file(ra_fixture); + let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); + let filtered = + inlay_hints.into_iter().map(|hint| (hint.range, hint.label)).collect::>(); + expect.assert_debug_eq(&filtered) + } + + #[track_caller] + pub(super) fn check_expect_clear_loc( + config: InlayHintsConfig, + ra_fixture: &str, + expect: Expect, + ) { + let (analysis, file_id) = fixture::file(ra_fixture); + let mut inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); + inlay_hints.iter_mut().flat_map(|hint| &mut hint.label.parts).for_each(|hint| { + if let Some(loc) = &mut hint.linked_location { + loc.range = TextRange::empty(TextSize::from(0)); + } + }); + let filtered = + inlay_hints.into_iter().map(|hint| (hint.range, hint.label)).collect::>(); + expect.assert_debug_eq(&filtered) + } + #[test] fn chaining_hints_ignore_comments() { check_expect( @@ -109,13 +136,9 @@ fn main() { "#, expect![[r#" [ - InlayHint { - range: 147..172, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ( + 147..172, + [ "", InlayHintLabelPart { text: "B", @@ -131,16 +154,10 @@ fn main() { }, "", ], - text_edit: None, - needs_resolve: true, - }, - InlayHint { - range: 147..154, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ), + ( + 147..154, + [ "", InlayHintLabelPart { text: "A", @@ -156,9 +173,7 @@ fn main() { }, "", ], - text_edit: None, - needs_resolve: true, - }, + ), ] "#]], ); @@ -204,13 +219,9 @@ fn main() { }"#, expect![[r#" [ - InlayHint { - range: 143..190, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ( + 143..190, + [ "", InlayHintLabelPart { text: "C", @@ -226,16 +237,10 @@ fn main() { }, "", ], - text_edit: None, - needs_resolve: true, - }, - InlayHint { - range: 143..179, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ), + ( + 143..179, + [ "", InlayHintLabelPart { text: "B", @@ -251,9 +256,7 @@ fn main() { }, "", ], - text_edit: None, - needs_resolve: true, - }, + ), ] "#]], ); @@ -283,13 +286,9 @@ fn main() { }"#, expect![[r#" [ - InlayHint { - range: 143..190, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ( + 143..190, + [ "", InlayHintLabelPart { text: "C", @@ -305,16 +304,10 @@ fn main() { }, "", ], - text_edit: None, - needs_resolve: true, - }, - InlayHint { - range: 143..179, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ), + ( + 143..179, + [ "", InlayHintLabelPart { text: "B", @@ -330,9 +323,7 @@ fn main() { }, "", ], - text_edit: 
None, - needs_resolve: true, - }, + ), ] "#]], ); @@ -363,13 +354,9 @@ fn main() { "#, expect![[r#" [ - InlayHint { - range: 246..283, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ( + 246..283, + [ "", InlayHintLabelPart { text: "B", @@ -398,16 +385,10 @@ fn main() { }, ">", ], - text_edit: None, - needs_resolve: true, - }, - InlayHint { - range: 246..265, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ), + ( + 246..265, + [ "", InlayHintLabelPart { text: "A", @@ -436,9 +417,7 @@ fn main() { }, ">", ], - text_edit: None, - needs_resolve: true, - }, + ), ] "#]], ); @@ -471,13 +450,9 @@ fn main() { "#, expect![[r#" [ - InlayHint { - range: 174..241, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ( + 174..241, + [ "impl ", InlayHintLabelPart { text: "Iterator", @@ -506,16 +481,10 @@ fn main() { }, " = ()>", ], - text_edit: None, - needs_resolve: true, - }, - InlayHint { - range: 174..224, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ), + ( + 174..224, + [ "impl ", InlayHintLabelPart { text: "Iterator", @@ -544,16 +513,10 @@ fn main() { }, " = ()>", ], - text_edit: None, - needs_resolve: true, - }, - InlayHint { - range: 174..206, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ), + ( + 174..206, + [ "impl ", InlayHintLabelPart { text: "Iterator", @@ -582,16 +545,10 @@ fn main() { }, " = ()>", ], - text_edit: None, - needs_resolve: true, - }, - InlayHint { - range: 174..189, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ), + ( + 174..189, + [ "&mut ", InlayHintLabelPart { text: "MyIter", @@ -607,9 +564,7 @@ fn main() { }, "", ], - text_edit: None, - needs_resolve: true, - }, + ), ] "#]], ); @@ -639,13 +594,9 @@ fn main() { "#, expect![[r#" [ - InlayHint { - range: 124..130, - position: After, - pad_left: true, - pad_right: false, - kind: Type, - label: [ + ( + 124..130, + [ "", InlayHintLabelPart { text: "Struct", @@ -661,25 +612,10 @@ fn main() { }, "", ], - text_edit: Some( - TextEdit { - indels: [ - Indel { - insert: ": Struct", - delete: 130..130, - }, - ], - }, - ), - needs_resolve: true, - }, - InlayHint { - range: 145..185, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ), + ( + 145..185, + [ "", InlayHintLabelPart { text: "Struct", @@ -695,16 +631,10 @@ fn main() { }, "", ], - text_edit: None, - needs_resolve: true, - }, - InlayHint { - range: 145..168, - position: After, - pad_left: true, - pad_right: false, - kind: Chaining, - label: [ + ), + ( + 145..168, + [ "", InlayHintLabelPart { text: "Struct", @@ -720,16 +650,10 @@ fn main() { }, "", ], - text_edit: None, - needs_resolve: true, - }, - InlayHint { - range: 222..228, - position: Before, - pad_left: false, - pad_right: true, - kind: Parameter, - label: [ + ), + ( + 222..228, + [ InlayHintLabelPart { text: "self", linked_location: Some( @@ -743,9 +667,7 @@ fn main() { tooltip: "", }, ], - text_edit: None, - needs_resolve: true, - }, + ), ] "#]], ); diff --git a/crates/ide/src/inlay_hints/range_exclusive.rs b/crates/ide/src/inlay_hints/range_exclusive.rs new file mode 100644 index 0000000000..50ab15c504 --- /dev/null +++ b/crates/ide/src/inlay_hints/range_exclusive.rs @@ -0,0 +1,121 @@ +//! Implementation of "range exclusive" inlay hints: +//! ```no_run +//! for i in 0../* < */10 {} +//! if let ../* < */100 = 50 {} +//! 
``` +use syntax::{ast, SyntaxToken, T}; + +use crate::{InlayHint, InlayHintsConfig}; + +pub(super) fn hints( + acc: &mut Vec, + config: &InlayHintsConfig, + range: impl ast::RangeItem, +) -> Option<()> { + (config.range_exclusive_hints && range.end().is_some()) + .then(|| { + range.op_token().filter(|token| token.kind() == T![..]).map(|token| { + acc.push(inlay_hint(token)); + }) + }) + .flatten() +} + +fn inlay_hint(token: SyntaxToken) -> InlayHint { + InlayHint { + range: token.text_range(), + position: crate::InlayHintPosition::After, + pad_left: false, + pad_right: false, + kind: crate::InlayKind::RangeExclusive, + label: crate::InlayHintLabel::from("<"), + text_edit: None, + needs_resolve: false, + } +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, + InlayHintsConfig, + }; + + #[test] + fn range_exclusive_expression_bounded_above_hints() { + check_with_config( + InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG }, + r#" +fn main() { + let a = 0..10; + //^^< + let b = ..100; + //^^< + let c = (2 - 1)..(7 * 8) + //^^< +}"#, + ); + } + + #[test] + fn range_exclusive_expression_unbounded_above_no_hints() { + check_with_config( + InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG }, + r#" +fn main() { + let a = 0..; + let b = ..; +}"#, + ); + } + + #[test] + fn range_inclusive_expression_no_hints() { + check_with_config( + InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG }, + r#" +fn main() { + let a = 0..=10; + let b = ..=100; +}"#, + ); + } + + #[test] + fn range_exclusive_pattern_bounded_above_hints() { + check_with_config( + InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG }, + r#" +fn main() { + if let 0..10 = 0 {} + //^^< + if let ..100 = 0 {} + //^^< +}"#, + ); + } + + #[test] + fn range_exclusive_pattern_unbounded_above_no_hints() { + check_with_config( + InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG }, + r#" +fn main() { + if let 0.. = 0 {} + if let .. = 0 {} +}"#, + ); + } + + #[test] + fn range_inclusive_pattern_no_hints() { + check_with_config( + InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG }, + r#" +fn main() { + if let 0..=10 = 0 {} + if let ..=100 = 0 {} +}"#, + ); + } +} diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 6ff16b9e2f..c98e9fba12 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -99,7 +99,10 @@ pub use crate::{ }, join_lines::JoinLinesConfig, markup::Markup, - moniker::{MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation}, + moniker::{ + MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation, + SymbolInformationKind, + }, move_item::Direction, navigation_target::{NavigationTarget, UpmappingResult}, prime_caches::ParallelPrimeCachesProgress, @@ -411,11 +414,12 @@ impl Analysis { } /// Fuzzy searches for a symbol. - pub fn symbol_search(&self, query: Query) -> Cancellable> { + pub fn symbol_search(&self, query: Query, limit: usize) -> Cancellable> { self.with_db(|db| { symbol_index::world_symbols(db, query) .into_iter() // xx: should we make this a par iter? .filter_map(|s| s.try_to_nav(db)) + .take(limit) .map(UpmappingResult::call_site) .collect::>() }) diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index 8e8bb5e013..486329dade 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -1,7 +1,7 @@ //! 
This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports) //! for LSIF and LSP. -use hir::{AsAssocItem, AssocItemContainer, Crate, DescendPreference, Semantics}; +use hir::{Adt, AsAssocItem, AssocItemContainer, Crate, DescendPreference, MacroKind, Semantics}; use ide_db::{ base_db::{CrateOrigin, FilePosition, LangCrateOrigin}, defs::{Definition, IdentClass}, @@ -25,6 +25,62 @@ pub enum MonikerDescriptorKind { Meta, } +// Subset of scip_types::SymbolInformation::Kind +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum SymbolInformationKind { + AssociatedType, + Attribute, + Constant, + Enum, + EnumMember, + Field, + Function, + Macro, + Method, + Module, + Parameter, + SelfParameter, + StaticMethod, + StaticVariable, + Struct, + Trait, + TraitMethod, + Type, + TypeAlias, + TypeParameter, + Union, + Variable, +} + +impl From for MonikerDescriptorKind { + fn from(value: SymbolInformationKind) -> Self { + match value { + SymbolInformationKind::AssociatedType => Self::TypeParameter, + SymbolInformationKind::Attribute => Self::Macro, + SymbolInformationKind::Constant => Self::Term, + SymbolInformationKind::Enum => Self::Type, + SymbolInformationKind::EnumMember => Self::Type, + SymbolInformationKind::Field => Self::Term, + SymbolInformationKind::Function => Self::Method, + SymbolInformationKind::Macro => Self::Macro, + SymbolInformationKind::Method => Self::Method, + SymbolInformationKind::Module => Self::Namespace, + SymbolInformationKind::Parameter => Self::Parameter, + SymbolInformationKind::SelfParameter => Self::Parameter, + SymbolInformationKind::StaticMethod => Self::Method, + SymbolInformationKind::StaticVariable => Self::Meta, + SymbolInformationKind::Struct => Self::Type, + SymbolInformationKind::Trait => Self::Type, + SymbolInformationKind::TraitMethod => Self::Method, + SymbolInformationKind::Type => Self::Type, + SymbolInformationKind::TypeAlias => Self::Type, + SymbolInformationKind::TypeParameter => Self::TypeParameter, + SymbolInformationKind::Union => Self::Type, + SymbolInformationKind::Variable => Self::Term, + } + } +} + #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct MonikerDescriptor { pub name: String, @@ -112,6 +168,69 @@ pub(crate) fn moniker( Some(RangeInfo::new(original_token.text_range(), navs)) } +pub(crate) fn def_to_kind(db: &RootDatabase, def: Definition) -> SymbolInformationKind { + use SymbolInformationKind::*; + + match def { + Definition::Macro(it) => match it.kind(db) { + MacroKind::Declarative => Macro, + MacroKind::Derive => Attribute, + MacroKind::BuiltIn => Macro, + MacroKind::Attr => Attribute, + MacroKind::ProcMacro => Macro, + }, + Definition::Field(..) | Definition::TupleField(..) => Field, + Definition::Module(..) => Module, + Definition::Function(it) => { + if it.as_assoc_item(db).is_some() { + if it.has_self_param(db) { + if it.has_body(db) { + Method + } else { + TraitMethod + } + } else { + StaticMethod + } + } else { + Function + } + } + Definition::Adt(Adt::Struct(..)) => Struct, + Definition::Adt(Adt::Union(..)) => Union, + Definition::Adt(Adt::Enum(..)) => Enum, + Definition::Variant(..) => EnumMember, + Definition::Const(..) => Constant, + Definition::Static(..) => StaticVariable, + Definition::Trait(..) => Trait, + Definition::TraitAlias(..) => Trait, + Definition::TypeAlias(it) => { + if it.as_assoc_item(db).is_some() { + AssociatedType + } else { + TypeAlias + } + } + Definition::BuiltinType(..) 
=> Type, + Definition::SelfType(..) => TypeAlias, + Definition::GenericParam(..) => TypeParameter, + Definition::Local(it) => { + if it.is_self(db) { + SelfParameter + } else if it.is_param(db) { + Parameter + } else { + Variable + } + } + Definition::Label(..) => Variable, // For lack of a better variant + Definition::DeriveHelper(..) => Attribute, + Definition::BuiltinAttr(..) => Attribute, + Definition::ToolModule(..) => Module, + Definition::ExternCrateDecl(..) => Module, + } +} + pub(crate) fn def_to_moniker( db: &RootDatabase, def: Definition, @@ -134,7 +253,7 @@ pub(crate) fn def_to_moniker( description.extend(module.path_to_root(db).into_iter().filter_map(|x| { Some(MonikerDescriptor { name: x.name(db)?.display(db).to_string(), - desc: MonikerDescriptorKind::Namespace, + desc: def_to_kind(db, x.into()).into(), }) })); @@ -147,7 +266,7 @@ pub(crate) fn def_to_moniker( // we have to include the trait name as part of the moniker for uniqueness. description.push(MonikerDescriptor { name: trait_.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Type, + desc: def_to_kind(db, trait_.into()).into(), }); } AssocItemContainer::Impl(impl_) => { @@ -156,14 +275,14 @@ pub(crate) fn def_to_moniker( if let Some(adt) = impl_.self_ty(db).as_adt() { description.push(MonikerDescriptor { name: adt.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Type, + desc: def_to_kind(db, adt.into()).into(), }); } if let Some(trait_) = impl_.trait_(db) { description.push(MonikerDescriptor { name: trait_.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Type, + desc: def_to_kind(db, trait_.into()).into(), }); } } @@ -173,21 +292,26 @@ pub(crate) fn def_to_moniker( if let Definition::Field(it) = def { description.push(MonikerDescriptor { name: it.parent_def(db).name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Type, + desc: def_to_kind(db, it.parent_def(db).into()).into(), }); } // Qualify locals/parameters by their parent definition name. if let Definition::Local(it) = def { - let parent_name = it.parent(db).name(db); - if let Some(name) = parent_name { - description.push(MonikerDescriptor { - name: name.display(db).to_string(), - desc: MonikerDescriptorKind::Method, - }); + let parent = Definition::try_from(it.parent(db)).ok(); + if let Some(parent) = parent { + let parent_name = parent.name(db); + if let Some(name) = parent_name { + description.push(MonikerDescriptor { + name: name.display(db).to_string(), + desc: def_to_kind(db, parent).into(), + }); + } } } + let desc = def_to_kind(db, def).into(); + let name_desc = match def { // These are handled by top-level guard (for performance). 
Definition::GenericParam(_) @@ -201,67 +325,54 @@ pub(crate) fn def_to_moniker( return None; } - MonikerDescriptor { - name: local.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Parameter, - } + MonikerDescriptor { name: local.name(db).display(db).to_string(), desc } + } + Definition::Macro(m) => { + MonikerDescriptor { name: m.name(db).display(db).to_string(), desc } + } + Definition::Function(f) => { + MonikerDescriptor { name: f.name(db).display(db).to_string(), desc } + } + Definition::Variant(v) => { + MonikerDescriptor { name: v.name(db).display(db).to_string(), desc } + } + Definition::Const(c) => { + MonikerDescriptor { name: c.name(db)?.display(db).to_string(), desc } + } + Definition::Trait(trait_) => { + MonikerDescriptor { name: trait_.name(db).display(db).to_string(), desc } + } + Definition::TraitAlias(ta) => { + MonikerDescriptor { name: ta.name(db).display(db).to_string(), desc } + } + Definition::TypeAlias(ta) => { + MonikerDescriptor { name: ta.name(db).display(db).to_string(), desc } + } + Definition::Module(m) => { + MonikerDescriptor { name: m.name(db)?.display(db).to_string(), desc } + } + Definition::BuiltinType(b) => { + MonikerDescriptor { name: b.name().display(db).to_string(), desc } } - Definition::Macro(m) => MonikerDescriptor { - name: m.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Macro, - }, - Definition::Function(f) => MonikerDescriptor { - name: f.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Method, - }, - Definition::Variant(v) => MonikerDescriptor { - name: v.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Type, - }, - Definition::Const(c) => MonikerDescriptor { - name: c.name(db)?.display(db).to_string(), - desc: MonikerDescriptorKind::Term, - }, - Definition::Trait(trait_) => MonikerDescriptor { - name: trait_.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Type, - }, - Definition::TraitAlias(ta) => MonikerDescriptor { - name: ta.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Type, - }, - Definition::TypeAlias(ta) => MonikerDescriptor { - name: ta.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::TypeParameter, - }, - Definition::Module(m) => MonikerDescriptor { - name: m.name(db)?.display(db).to_string(), - desc: MonikerDescriptorKind::Namespace, - }, - Definition::BuiltinType(b) => MonikerDescriptor { - name: b.name().display(db).to_string(), - desc: MonikerDescriptorKind::Type, - }, Definition::SelfType(imp) => MonikerDescriptor { name: imp.self_ty(db).as_adt()?.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Type, - }, - Definition::Field(it) => MonikerDescriptor { - name: it.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Term, - }, - Definition::Adt(adt) => MonikerDescriptor { - name: adt.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Type, - }, - Definition::Static(s) => MonikerDescriptor { - name: s.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Meta, - }, - Definition::ExternCrateDecl(m) => MonikerDescriptor { - name: m.name(db).display(db).to_string(), - desc: MonikerDescriptorKind::Namespace, + desc, }, + Definition::Field(it) => { + MonikerDescriptor { name: it.name(db).display(db).to_string(), desc } + } + Definition::TupleField(it) => { + MonikerDescriptor { name: it.name().display(db).to_string(), desc } + } + Definition::Adt(adt) => { + MonikerDescriptor { name: adt.name(db).display(db).to_string(), desc } + } + Definition::Static(s) => { + 
MonikerDescriptor { name: s.name(db).display(db).to_string(), desc } + } + Definition::ExternCrateDecl(m) => { + MonikerDescriptor { name: m.name(db).display(db).to_string(), desc } + } }; description.push(name_desc); diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index 6cb7d7724d..bc0574ca86 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -237,7 +237,7 @@ impl TryToNav for Definition { Definition::TraitAlias(it) => it.try_to_nav(db), Definition::TypeAlias(it) => it.try_to_nav(db), Definition::ExternCrateDecl(it) => Some(it.try_to_nav(db)?), - Definition::BuiltinType(_) => None, + Definition::BuiltinType(_) | Definition::TupleField(_) => None, Definition::ToolModule(_) => None, Definition::BuiltinAttr(_) => None, // FIXME: The focus range should be set to the helper declaration @@ -724,11 +724,8 @@ fn orig_range_with_focus( ) -> UpmappingResult<(FileRange, Option)> { let Some(name) = name else { return orig_range(db, hir_file, value) }; - let call_range = || { - db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id) - .kind - .original_call_range(db) - }; + let call_kind = + || db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id).kind; let def_range = || { db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id) @@ -755,7 +752,22 @@ fn orig_range_with_focus( } // name lies outside the node, so instead point to the macro call which // *should* contain the name - _ => call_range(), + _ => { + let kind = call_kind(); + let range = kind.clone().original_call_range_with_body(db); + //If the focus range is in the attribute/derive body, we + // need to point the call site to the entire body, if not, fall back + // to the name range of the attribute/derive call + // FIXME: Do this differently, this is very inflexible the caller + // should choose this behavior + if range.file_id == focus_range.file_id + && range.range.contains_range(focus_range.range) + { + range + } else { + kind.original_call_range(db) + } + } }, Some(focus_range), ), @@ -784,7 +796,7 @@ fn orig_range_with_focus( // node is in macro def, just show the focus _ => ( // show the macro call - (call_range(), None), + (call_kind().original_call_range(db), None), Some((focus_range, Some(focus_range))), ), } @@ -848,7 +860,7 @@ fn foo() { enum FooInner { } } "#, ); - let navs = analysis.symbol_search(Query::new("FooInner".to_string())).unwrap(); + let navs = analysis.symbol_search(Query::new("FooInner".to_string()), !0).unwrap(); expect![[r#" [ NavigationTarget { @@ -886,7 +898,7 @@ struct Foo; "#, ); - let navs = analysis.symbol_search(Query::new("foo".to_string())).unwrap(); + let navs = analysis.symbol_search(Query::new("foo".to_string()), !0).unwrap(); assert_eq!(navs.len(), 2) } } diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index 1febfabfcb..3bf41defe3 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -366,6 +366,7 @@ fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Opt #[cfg(test)] mod tests { use expect_test::{expect, Expect}; + use ide_db::source_change::SourceChange; use stdx::trim_indent; use test_utils::assert_eq_text; use text_edit::TextEdit; @@ -418,7 +419,7 @@ mod tests { let (analysis, position) = fixture::position(ra_fixture); let source_change = analysis.rename(position, new_name).unwrap().expect("Expect returned a RenameError"); - expect.assert_debug_eq(&source_change) + 
expect.assert_eq(&filter_expect(source_change)) } fn check_expect_will_rename_file(new_name: &str, ra_fixture: &str, expect: Expect) { @@ -427,7 +428,7 @@ mod tests { .will_rename_file(position.file_id, new_name) .unwrap() .expect("Expect returned a RenameError"); - expect.assert_debug_eq(&source_change) + expect.assert_eq(&filter_expect(source_change)) } fn check_prepare(ra_fixture: &str, expect: Expect) { @@ -444,6 +445,19 @@ mod tests { }; } + fn filter_expect(source_change: SourceChange) -> String { + let source_file_edits = source_change + .source_file_edits + .into_iter() + .map(|(id, (text_edit, _))| (id, text_edit.into_iter().collect::>())) + .collect::>(); + + format!( + "source_file_edits: {:#?}\nfile_system_edits: {:#?}\n", + source_file_edits, source_change.file_system_edits + ) + } + #[test] fn test_prepare_rename_namelikes() { check_prepare(r"fn name$0<'lifetime>() {}", expect![[r#"3..7: name"#]]); @@ -916,37 +930,32 @@ mod foo$0; // empty "#, expect![[r#" - SourceChange { - source_file_edits: { + source_file_edits: [ + ( FileId( 1, - ): ( - TextEdit { - indels: [ - Indel { - insert: "foo2", - delete: 4..7, - }, - ], - }, - None, ), - }, - file_system_edits: [ - MoveFile { - src: FileId( + [ + Indel { + insert: "foo2", + delete: 4..7, + }, + ], + ), + ] + file_system_edits: [ + MoveFile { + src: FileId( + 2, + ), + dst: AnchoredPathBuf { + anchor: FileId( 2, ), - dst: AnchoredPathBuf { - anchor: FileId( - 2, - ), - path: "foo2.rs", - }, + path: "foo2.rs", }, - ], - is_snippet: false, - } + }, + ] "#]], ); } @@ -968,50 +977,43 @@ pub struct FooContent; use crate::foo$0::FooContent; "#, expect![[r#" - SourceChange { - source_file_edits: { + source_file_edits: [ + ( FileId( 0, - ): ( - TextEdit { - indels: [ - Indel { - insert: "quux", - delete: 8..11, - }, - ], - }, - None, ), + [ + Indel { + insert: "quux", + delete: 8..11, + }, + ], + ), + ( FileId( 2, - ): ( - TextEdit { - indels: [ - Indel { - insert: "quux", - delete: 11..14, - }, - ], - }, - None, ), - }, - file_system_edits: [ - MoveFile { - src: FileId( + [ + Indel { + insert: "quux", + delete: 11..14, + }, + ], + ), + ] + file_system_edits: [ + MoveFile { + src: FileId( + 1, + ), + dst: AnchoredPathBuf { + anchor: FileId( 1, ), - dst: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "quux.rs", - }, + path: "quux.rs", }, - ], - is_snippet: false, - } + }, + ] "#]], ); } @@ -1027,43 +1029,38 @@ mod fo$0o; // empty "#, expect![[r#" - SourceChange { - source_file_edits: { + source_file_edits: [ + ( FileId( 0, - ): ( - TextEdit { - indels: [ - Indel { - insert: "foo2", - delete: 4..7, - }, - ], - }, - None, ), - }, - file_system_edits: [ - MoveDir { - src: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "../foo", + [ + Indel { + insert: "foo2", + delete: 4..7, }, - src_id: FileId( + ], + ), + ] + file_system_edits: [ + MoveDir { + src: AnchoredPathBuf { + anchor: FileId( 1, ), - dst: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "../foo2", - }, + path: "../foo", }, - ], - is_snippet: false, - } + src_id: FileId( + 1, + ), + dst: AnchoredPathBuf { + anchor: FileId( + 1, + ), + path: "../foo2", + }, + }, + ] "#]], ); } @@ -1080,37 +1077,32 @@ mod outer { mod fo$0o; } // empty "#, expect![[r#" - SourceChange { - source_file_edits: { + source_file_edits: [ + ( FileId( 0, - ): ( - TextEdit { - indels: [ - Indel { - insert: "bar", - delete: 16..19, - }, - ], - }, - None, ), - }, - file_system_edits: [ - MoveFile { - src: FileId( + [ + Indel { + insert: "bar", + delete: 16..19, + }, + ], + ), + ] + 
file_system_edits: [ + MoveFile { + src: FileId( + 1, + ), + dst: AnchoredPathBuf { + anchor: FileId( 1, ), - dst: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "bar.rs", - }, + path: "bar.rs", }, - ], - is_snippet: false, - } + }, + ] "#]], ); } @@ -1156,50 +1148,43 @@ pub mod foo$0; // pub fn fun() {} "#, expect![[r#" - SourceChange { - source_file_edits: { + source_file_edits: [ + ( FileId( 0, - ): ( - TextEdit { - indels: [ - Indel { - insert: "foo2", - delete: 27..30, - }, - ], - }, - None, ), + [ + Indel { + insert: "foo2", + delete: 27..30, + }, + ], + ), + ( FileId( 1, - ): ( - TextEdit { - indels: [ - Indel { - insert: "foo2", - delete: 8..11, - }, - ], - }, - None, ), - }, - file_system_edits: [ - MoveFile { - src: FileId( + [ + Indel { + insert: "foo2", + delete: 8..11, + }, + ], + ), + ] + file_system_edits: [ + MoveFile { + src: FileId( + 2, + ), + dst: AnchoredPathBuf { + anchor: FileId( 2, ), - dst: AnchoredPathBuf { - anchor: FileId( - 2, - ), - path: "foo2.rs", - }, + path: "foo2.rs", }, - ], - is_snippet: false, - } + }, + ] "#]], ); } @@ -1229,54 +1214,49 @@ mod quux; // empty "#, expect![[r#" - SourceChange { - source_file_edits: { + source_file_edits: [ + ( FileId( 0, - ): ( - TextEdit { - indels: [ - Indel { - insert: "foo2", - delete: 4..7, - }, - ], - }, - None, ), + [ + Indel { + insert: "foo2", + delete: 4..7, + }, + ], + ), + ] + file_system_edits: [ + MoveFile { + src: FileId( + 1, + ), + dst: AnchoredPathBuf { + anchor: FileId( + 1, + ), + path: "foo2.rs", + }, }, - file_system_edits: [ - MoveFile { - src: FileId( + MoveDir { + src: AnchoredPathBuf { + anchor: FileId( 1, ), - dst: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "foo2.rs", - }, + path: "foo", }, - MoveDir { - src: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "foo", - }, - src_id: FileId( + src_id: FileId( + 1, + ), + dst: AnchoredPathBuf { + anchor: FileId( 1, ), - dst: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "foo2", - }, + path: "foo2", }, - ], - is_snippet: false, - } + }, + ] "#]], ) } @@ -1344,12 +1324,9 @@ fn foo() {} mod bar$0; "#, expect![[r#" - SourceChange { - source_file_edits: {}, - file_system_edits: [], - is_snippet: false, - } - "#]], + source_file_edits: [] + file_system_edits: [] + "#]], ) } @@ -1370,58 +1347,53 @@ pub mod bar; pub fn baz() {} "#, expect![[r#" - SourceChange { - source_file_edits: { + source_file_edits: [ + ( FileId( 0, - ): ( - TextEdit { - indels: [ - Indel { - insert: "r#fn", - delete: 4..7, - }, - Indel { - insert: "r#fn", - delete: 22..25, - }, - ], - }, - None, ), + [ + Indel { + insert: "r#fn", + delete: 4..7, + }, + Indel { + insert: "r#fn", + delete: 22..25, + }, + ], + ), + ] + file_system_edits: [ + MoveFile { + src: FileId( + 1, + ), + dst: AnchoredPathBuf { + anchor: FileId( + 1, + ), + path: "fn.rs", + }, }, - file_system_edits: [ - MoveFile { - src: FileId( + MoveDir { + src: AnchoredPathBuf { + anchor: FileId( 1, ), - dst: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "fn.rs", - }, + path: "foo", }, - MoveDir { - src: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "foo", - }, - src_id: FileId( + src_id: FileId( + 1, + ), + dst: AnchoredPathBuf { + anchor: FileId( 1, ), - dst: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "fn", - }, + path: "fn", }, - ], - is_snippet: false, - } + }, + ] "#]], ); } @@ -1443,58 +1415,53 @@ pub mod bar; pub fn baz() {} "#, expect![[r#" - SourceChange { - source_file_edits: { + source_file_edits: [ + ( FileId( 0, - ): ( - TextEdit { - indels: [ - Indel { - 
insert: "foo", - delete: 4..8, - }, - Indel { - insert: "foo", - delete: 23..27, - }, - ], - }, - None, ), + [ + Indel { + insert: "foo", + delete: 4..8, + }, + Indel { + insert: "foo", + delete: 23..27, + }, + ], + ), + ] + file_system_edits: [ + MoveFile { + src: FileId( + 1, + ), + dst: AnchoredPathBuf { + anchor: FileId( + 1, + ), + path: "foo.rs", + }, }, - file_system_edits: [ - MoveFile { - src: FileId( + MoveDir { + src: AnchoredPathBuf { + anchor: FileId( 1, ), - dst: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "foo.rs", - }, + path: "fn", }, - MoveDir { - src: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "fn", - }, - src_id: FileId( + src_id: FileId( + 1, + ), + dst: AnchoredPathBuf { + anchor: FileId( 1, ), - dst: AnchoredPathBuf { - anchor: FileId( - 1, - ), - path: "foo", - }, + path: "foo", }, - ], - is_snippet: false, - } + }, + ] "#]], ); } diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index d334e66d3d..352ce89820 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -555,28 +555,33 @@ mod tests { use crate::fixture; - use super::{RunnableTestKind::*, *}; - - fn check( - ra_fixture: &str, - // FIXME: fold this into `expect` as well - actions: &[RunnableTestKind], - expect: Expect, - ) { + fn check(ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); let mut runnables = analysis.runnables(position.file_id).unwrap(); runnables.sort_by_key(|it| (it.nav.full_range.start(), it.nav.name.clone())); - expect.assert_debug_eq(&runnables); - assert_eq!( - actions, - runnables.into_iter().map(|it| it.test_kind()).collect::>().as_slice() - ); + + let result = runnables + .into_iter() + .map(|runnable| { + let mut a = format!("({:?}, {:?}", runnable.test_kind(), runnable.nav); + if runnable.use_name_in_title { + a.push_str(", true"); + } + if let Some(cfg) = runnable.cfg { + a.push_str(&format!(", {cfg:?}")); + } + a.push_str(")"); + a + }) + .collect::>(); + expect.assert_debug_eq(&result); } fn check_tests(ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); let tests = analysis.related_tests(position, None).unwrap(); - expect.assert_debug_eq(&tests); + let navigation_targets = tests.into_iter().map(|runnable| runnable.nav).collect::>(); + expect.assert_debug_eq(&navigation_targets); } #[test] @@ -607,133 +612,15 @@ mod not_a_root { fn main() {} } "#, - &[TestMod, Bin, Bin, Test, Test, Test, Bench], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 0..253, - name: "", - kind: Module, - }, - kind: TestMod { - path: "", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..13, - focus_range: 4..8, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 15..76, - focus_range: 42..71, - name: "__cortex_m_rt_main_trampoline", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 78..102, - focus_range: 89..97, - name: "test_foo", - kind: Function, - }, - kind: Test { - test_id: Path( - "test_foo", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), 
- full_range: 104..155, - focus_range: 136..150, - name: "test_full_path", - kind: Function, - }, - kind: Test { - test_id: Path( - "test_full_path", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 157..191, - focus_range: 178..186, - name: "test_foo", - kind: Function, - }, - kind: Test { - test_id: Path( - "test_foo", - ), - attr: TestAttr { - ignore: true, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 193..215, - focus_range: 205..210, - name: "bench", - kind: Function, - }, - kind: Bench { - test_id: Path( - "bench", - ), - }, - cfg: None, - }, + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..253, name: \"\", kind: Module })", + "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(Bin, NavigationTarget { file_id: FileId(0), full_range: 15..76, focus_range: 42..71, name: \"__cortex_m_rt_main_trampoline\", kind: Function })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 78..102, focus_range: 89..97, name: \"test_foo\", kind: Function })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 104..155, focus_range: 136..150, name: \"test_full_path\", kind: Function })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 157..191, focus_range: 178..186, name: \"test_foo\", kind: Function })", + "(Bench, NavigationTarget { file_id: FileId(0), full_range: 193..215, focus_range: 205..210, name: \"bench\", kind: Function })", ] "#]], ); @@ -835,155 +722,17 @@ trait Test { /// ``` impl Test for StructWithRunnable {} "#, - &[Bin, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..13, - focus_range: 4..8, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 15..74, - name: "should_have_runnable", - }, - kind: DocTest { - test_id: Path( - "should_have_runnable", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 76..148, - name: "should_have_runnable_1", - }, - kind: DocTest { - test_id: Path( - "should_have_runnable_1", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 150..254, - name: "should_have_runnable_2", - }, - kind: DocTest { - test_id: Path( - "should_have_runnable_2", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 256..320, - name: "should_have_no_runnable_3", - }, - kind: DocTest { - test_id: Path( - "should_have_no_runnable_3", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 322..398, - name: "should_have_no_runnable_4", - }, - kind: DocTest { - test_id: Path( - "should_have_no_runnable_4", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 900..965, - name: "StructWithRunnable", - }, - kind: DocTest { - test_id: Path( - 
"StructWithRunnable", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 967..1024, - focus_range: 1003..1021, - name: "impl", - kind: Impl, - }, - kind: DocTest { - test_id: Path( - "StructWithRunnable", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1088..1154, - focus_range: 1133..1151, - name: "impl", - kind: Impl, - }, - kind: DocTest { - test_id: Path( - "StructWithRunnable", - ), - }, - cfg: None, - }, + "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 15..74, name: \"should_have_runnable\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 76..148, name: \"should_have_runnable_1\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 150..254, name: \"should_have_runnable_2\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 256..320, name: \"should_have_no_runnable_3\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 322..398, name: \"should_have_no_runnable_4\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 900..965, name: \"StructWithRunnable\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 967..1024, focus_range: 1003..1021, name: \"impl\", kind: Impl })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 1088..1154, focus_range: 1133..1151, name: \"impl\", kind: Impl })", ] "#]], ); @@ -1005,39 +754,10 @@ impl Data { fn foo() {} } "#, - &[Bin, DocTest], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..13, - focus_range: 4..8, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 44..98, - name: "foo", - }, - kind: DocTest { - test_id: Path( - "Data::foo", - ), - }, - cfg: None, - }, + "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 44..98, name: \"foo\" })", ] "#]], ); @@ -1059,39 +779,10 @@ impl Data<'a> { fn foo() {} } "#, - &[Bin, DocTest], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..13, - focus_range: 4..8, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 52..106, - name: "foo", - }, - kind: DocTest { - test_id: Path( - "Data<'a>::foo", - ), - }, - cfg: None, - }, + "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 52..106, name: \"foo\" })", ] "#]], ); @@ -1113,39 +804,10 @@ impl Data<'a, T, U> { fn foo() {} } "#, - &[Bin, DocTest], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..13, - focus_range: 4..8, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 
70..124, - name: "foo", - }, - kind: DocTest { - test_id: Path( - "Data<'a,T,U>::foo", - ), - }, - cfg: None, - }, + "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 70..124, name: \"foo\" })", ] "#]], ); @@ -1167,39 +829,10 @@ impl Data { fn foo() {} } "#, - &[Bin, DocTest], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..13, - focus_range: 4..8, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 79..133, - name: "foo", - }, - kind: DocTest { - test_id: Path( - "Data::foo", - ), - }, - cfg: None, - }, + "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 79..133, name: \"foo\" })", ] "#]], ); @@ -1221,39 +854,10 @@ impl<'a, T, const N: usize> Data<'a, T, N> { fn foo() {} } "#, - &[Bin, DocTest], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..13, - focus_range: 4..8, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 100..154, - name: "foo", - }, - kind: DocTest { - test_id: Path( - "Data<'a,T,N>::foo", - ), - }, - cfg: None, - }, + "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 100..154, name: \"foo\" })", ] "#]], ); @@ -1269,47 +873,10 @@ mod test_mod { fn test_foo1() {} } "#, - &[TestMod, Test], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..51, - focus_range: 5..13, - name: "test_mod", - kind: Module, - description: "mod test_mod", - }, - kind: TestMod { - path: "test_mod", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 20..49, - focus_range: 35..44, - name: "test_foo1", - kind: Function, - }, - kind: Test { - test_id: Path( - "test_mod::test_foo1", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 1..51, focus_range: 5..13, name: \"test_mod\", kind: Module, description: \"mod test_mod\" })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 20..49, focus_range: 35..44, name: \"test_foo1\", kind: Function })", ] "#]], ); @@ -1342,123 +909,14 @@ mod root_tests { mod nested_tests_4 {} } "#, - &[TestMod, TestMod, Test, Test, TestMod, Test], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 22..323, - focus_range: 26..40, - name: "nested_tests_0", - kind: Module, - description: "mod nested_tests_0", - }, - kind: TestMod { - path: "root_tests::nested_tests_0", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 51..192, - focus_range: 55..69, - name: "nested_tests_1", - kind: Module, - description: "mod nested_tests_1", - }, - kind: TestMod { - path: 
"root_tests::nested_tests_0::nested_tests_1", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 84..126, - focus_range: 107..121, - name: "nested_test_11", - kind: Function, - }, - kind: Test { - test_id: Path( - "root_tests::nested_tests_0::nested_tests_1::nested_test_11", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 140..182, - focus_range: 163..177, - name: "nested_test_12", - kind: Function, - }, - kind: Test { - test_id: Path( - "root_tests::nested_tests_0::nested_tests_1::nested_test_12", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 202..286, - focus_range: 206..220, - name: "nested_tests_2", - kind: Module, - description: "mod nested_tests_2", - }, - kind: TestMod { - path: "root_tests::nested_tests_0::nested_tests_2", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 235..276, - focus_range: 258..271, - name: "nested_test_2", - kind: Function, - }, - kind: Test { - test_id: Path( - "root_tests::nested_tests_0::nested_tests_2::nested_test_2", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 22..323, focus_range: 26..40, name: \"nested_tests_0\", kind: Module, description: \"mod nested_tests_0\" })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 51..192, focus_range: 55..69, name: \"nested_tests_1\", kind: Module, description: \"mod nested_tests_1\" })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 84..126, focus_range: 107..121, name: \"nested_test_11\", kind: Function })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 140..182, focus_range: 163..177, name: \"nested_test_12\", kind: Function })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 202..286, focus_range: 206..220, name: \"nested_tests_2\", kind: Module, description: \"mod nested_tests_2\" })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 235..276, focus_range: 258..271, name: \"nested_test_2\", kind: Function })", ] "#]], ); @@ -1474,52 +932,10 @@ $0 #[cfg(feature = "foo")] fn test_foo1() {} "#, - &[TestMod, Test], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 0..51, - name: "", - kind: Module, - }, - kind: TestMod { - path: "", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..50, - focus_range: 36..45, - name: "test_foo1", - kind: Function, - }, - kind: Test { - test_id: Path( - "test_foo1", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: Some( - Atom( - KeyValue { - key: "feature", - value: "foo", - }, - ), - ), - }, + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..51, name: \"\", kind: Module })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 1..50, focus_range: 36..45, name: \"test_foo1\", kind: Function }, Atom(KeyValue { key: \"feature\", value: \"foo\" }))", ] "#]], ); @@ -1535,62 +951,10 @@ $0 #[cfg(all(feature = "foo", feature = "bar"))] fn test_foo1() {} "#, - &[TestMod, Test], expect![[r#" [ - Runnable { - 
use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 0..73, - name: "", - kind: Module, - }, - kind: TestMod { - path: "", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..72, - focus_range: 58..67, - name: "test_foo1", - kind: Function, - }, - kind: Test { - test_id: Path( - "test_foo1", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: Some( - All( - [ - Atom( - KeyValue { - key: "feature", - value: "foo", - }, - ), - Atom( - KeyValue { - key: "feature", - value: "bar", - }, - ), - ], - ), - ), - }, + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..73, name: \"\", kind: Module })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 1..72, focus_range: 58..67, name: \"test_foo1\", kind: Function }, All([Atom(KeyValue { key: \"feature\", value: \"foo\" }), Atom(KeyValue { key: \"feature\", value: \"bar\" })]))", ] "#]], ); @@ -1606,7 +970,6 @@ mod test_mod { fn foo1() {} } "#, - &[], expect![[r#" [] "#]], @@ -1628,25 +991,9 @@ impl Foo { fn foo() {} } "#, - &[DocTest], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 27..81, - name: "foo", - }, - kind: DocTest { - test_id: Path( - "foo::Foo::foo", - ), - }, - cfg: None, - }, + "(DocTest, NavigationTarget { file_id: FileId(1), full_range: 27..81, name: \"foo\" })", ] "#]], ); @@ -1683,110 +1030,14 @@ mod tests { gen2!(); gen_main!(); "#, - &[TestMod, TestMod, Test, Test, TestMod, Bin], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 0..315, - name: "", - kind: Module, - }, - kind: TestMod { - path: "", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 267..292, - focus_range: 271..276, - name: "tests", - kind: Module, - description: "mod tests", - }, - kind: TestMod { - path: "tests", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 283..290, - name: "foo_test", - kind: Function, - }, - kind: Test { - test_id: Path( - "tests::foo_test", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: true, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 293..301, - name: "foo_test2", - kind: Function, - }, - kind: Test { - test_id: Path( - "tests2::foo_test2", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: true, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 293..301, - name: "tests2", - kind: Module, - description: "mod tests2", - }, - kind: TestMod { - path: "tests2", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 302..314, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..315, name: \"\", kind: Module })", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 267..292, focus_range: 271..276, name: \"tests\", kind: Module, description: \"mod tests\" })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 283..290, name: \"foo_test\", kind: Function })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: 
\"foo_test2\", kind: Function }, true)", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)", + "(Bin, NavigationTarget { file_id: FileId(0), full_range: 302..314, name: \"main\", kind: Function })", ] "#]], ); @@ -1812,85 +1063,12 @@ macro_rules! foo { } foo!(); "#, - &[Test, Test, Test, TestMod], expect![[r#" [ - Runnable { - use_name_in_title: true, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 210..217, - name: "foo0", - kind: Function, - }, - kind: Test { - test_id: Path( - "foo_tests::foo0", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: true, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 210..217, - name: "foo1", - kind: Function, - }, - kind: Test { - test_id: Path( - "foo_tests::foo1", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: true, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 210..217, - name: "foo2", - kind: Function, - }, - kind: Test { - test_id: Path( - "foo_tests::foo2", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: true, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 210..217, - name: "foo_tests", - kind: Module, - description: "mod foo_tests", - }, - kind: TestMod { - path: "foo_tests", - }, - cfg: None, - }, + "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo0\", kind: Function }, true)", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo1\", kind: Function }, true)", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo2\", kind: Function }, true)", + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)", ] "#]], ); @@ -1909,7 +1087,6 @@ mod tests { fn t() {} } "#, - &[], expect![[r#" [] "#]], @@ -1929,26 +1106,9 @@ fn t0() {} #[test] fn t1() {} "#, - &[TestMod], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..7, - focus_range: 5..6, - name: "m", - kind: Module, - description: "mod m", - }, - kind: TestMod { - path: "m", - }, - cfg: None, - }, + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 1..7, focus_range: 5..6, name: \"m\", kind: Module, description: \"mod m\" })", ] "#]], ); @@ -1967,66 +1127,11 @@ fn t0() {} #[test] fn t1() {} "#, - &[TestMod, Test, Test], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 0..39, - name: "m", - kind: Module, - }, - kind: TestMod { - path: "m", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 1..19, - focus_range: 12..14, - name: "t0", - kind: Function, - }, - kind: Test { - test_id: Path( - "m::t0", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 1, - ), - full_range: 20..38, - focus_range: 31..33, - name: "t1", - kind: Function, - }, - kind: Test { - test_id: Path( - "m::t1", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, + "(TestMod, NavigationTarget { file_id: FileId(1), full_range: 0..39, name: \"m\", kind: Module 
})", + "(Test, NavigationTarget { file_id: FileId(1), full_range: 1..19, focus_range: 12..14, name: \"t0\", kind: Function })", + "(Test, NavigationTarget { file_id: FileId(1), full_range: 20..38, focus_range: 31..33, name: \"t1\", kind: Function })", ] "#]], ); @@ -2047,68 +1152,11 @@ mod module { fn t1() {} } "#, - &[TestMod, Test, Test], expect![[r#" [ - Runnable { - use_name_in_title: true, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 26..94, - focus_range: 30..36, - name: "module", - kind: Module, - description: "mod module", - }, - kind: TestMod { - path: "module", - }, - cfg: None, - }, - Runnable { - use_name_in_title: true, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 43..65, - focus_range: 58..60, - name: "t0", - kind: Function, - }, - kind: Test { - test_id: Path( - "module::t0", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: true, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 70..92, - focus_range: 85..87, - name: "t1", - kind: Function, - }, - kind: Test { - test_id: Path( - "module::t1", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 26..94, focus_range: 30..36, name: \"module\", kind: Module, description: \"mod module\" }, true)", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 43..65, focus_range: 58..60, name: \"t0\", kind: Function }, true)", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 70..92, focus_range: 85..87, name: \"t1\", kind: Function }, true)", ] "#]], ); @@ -2143,26 +1191,14 @@ mod tests { "#, expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 31..85, - focus_range: 46..54, - name: "foo_test", - kind: Function, - }, - kind: Test { - test_id: Path( - "tests::foo_test", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, + NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 31..85, + focus_range: 46..54, + name: "foo_test", + kind: Function, }, ] "#]], @@ -2188,26 +1224,14 @@ mod tests { "#, expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 71..122, - focus_range: 86..94, - name: "foo_test", - kind: Function, - }, - kind: Test { - test_id: Path( - "tests::foo_test", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, + NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 71..122, + focus_range: 86..94, + name: "foo_test", + kind: Function, }, ] "#]], @@ -2240,26 +1264,14 @@ mod tests { "#, expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 133..183, - focus_range: 148..156, - name: "foo_test", - kind: Function, - }, - kind: Test { - test_id: Path( - "tests::foo_test", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, + NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 133..183, + focus_range: 148..156, + name: "foo_test", + kind: Function, }, ] "#]], @@ -2292,47 +1304,23 @@ mod tests { "#, expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 121..185, - focus_range: 136..145, - name: "foo2_test", - kind: Function, - }, - kind: Test { - test_id: Path( - "tests::foo2_test", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, + NavigationTarget { + file_id: 
FileId( + 0, + ), + full_range: 121..185, + focus_range: 136..145, + name: "foo2_test", + kind: Function, }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 52..115, - focus_range: 67..75, - name: "foo_test", - kind: Function, - }, - kind: Test { - test_id: Path( - "tests::foo_test", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, + NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 52..115, + focus_range: 67..75, + name: "foo_test", + kind: Function, }, ] "#]], @@ -2354,39 +1342,10 @@ impl Data<'a, A, 12, C, D> { fn foo() {} } "#, - &[Bin, DocTest], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..13, - focus_range: 4..8, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 121..156, - name: "foo", - }, - kind: DocTest { - test_id: Path( - "Data<'a,A,12,C,D>::foo", - ), - }, - cfg: None, - }, + "(Bin, NavigationTarget { file_id: FileId(0), full_range: 1..13, focus_range: 4..8, name: \"main\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 121..156, name: \"foo\" })", ] "#]], ); @@ -2416,77 +1375,12 @@ impl Foo, ()> { fn t() {} } "#, - &[DocTest, DocTest, DocTest, DocTest], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 20..103, - focus_range: 47..56, - name: "impl", - kind: Impl, - }, - kind: DocTest { - test_id: Path( - "Foo", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 63..101, - name: "t", - }, - kind: DocTest { - test_id: Path( - "Foo::t", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 105..188, - focus_range: 126..146, - name: "impl", - kind: Impl, - }, - kind: DocTest { - test_id: Path( - "Foo,()>", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 153..186, - name: "t", - }, - kind: DocTest { - test_id: Path( - "Foo,()>::t", - ), - }, - cfg: None, - }, + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 20..103, focus_range: 47..56, name: \"impl\", kind: Impl })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 63..101, name: \"t\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 105..188, focus_range: 126..146, name: \"impl\", kind: Impl })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 153..186, name: \"t\" })", ] "#]], ); @@ -2512,7 +1406,6 @@ macro_rules! foo { }; } "#, - &[], expect![[r#" [] "#]], @@ -2532,25 +1425,9 @@ macro_rules! 
foo { }; } "#, - &[DocTest], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..94, - name: "foo", - }, - kind: DocTest { - test_id: Path( - "foo", - ), - }, - cfg: None, - }, + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 1..94, name: \"foo\" })", ] "#]], ); @@ -2596,149 +1473,16 @@ mod r#mod { impl r#trait for r#struct {} } "#, - &[TestMod, Test, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest], expect![[r#" [ - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 1..461, - focus_range: 5..10, - name: "r#mod", - kind: Module, - description: "mod r#mod", - }, - kind: TestMod { - path: "r#mod", - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 17..41, - focus_range: 32..36, - name: "r#fn", - kind: Function, - }, - kind: Test { - test_id: Path( - "r#mod::r#fn", - ), - attr: TestAttr { - ignore: false, - }, - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 47..84, - name: "r#for", - container_name: "r#mod", - }, - kind: DocTest { - test_id: Path( - "r#mod::r#for", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 90..146, - name: "r#struct", - container_name: "r#mod", - }, - kind: DocTest { - test_id: Path( - "r#mod::r#struct", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 152..266, - focus_range: 189..205, - name: "impl", - kind: Impl, - }, - kind: DocTest { - test_id: Path( - "r#struct", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 216..260, - name: "r#fn", - }, - kind: DocTest { - test_id: Path( - "r#mod::r#struct::r#fn", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 323..367, - name: "r#fn", - }, - kind: DocTest { - test_id: Path( - "r#mod::r#struct::r#fn", - ), - }, - cfg: None, - }, - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 401..459, - focus_range: 445..456, - name: "impl", - kind: Impl, - }, - kind: DocTest { - test_id: Path( - "r#struct", - ), - }, - cfg: None, - }, + "(TestMod, NavigationTarget { file_id: FileId(0), full_range: 1..461, focus_range: 5..10, name: \"r#mod\", kind: Module, description: \"mod r#mod\" })", + "(Test, NavigationTarget { file_id: FileId(0), full_range: 17..41, focus_range: 32..36, name: \"r#fn\", kind: Function })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 47..84, name: \"r#for\", container_name: \"r#mod\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 90..146, name: \"r#struct\", container_name: \"r#mod\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 152..266, focus_range: 189..205, name: \"impl\", kind: Impl })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 216..260, name: \"r#fn\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 323..367, name: \"r#fn\" })", + "(DocTest, NavigationTarget { file_id: FileId(0), full_range: 401..459, focus_range: 445..456, name: \"impl\", kind: Impl })", ] "#]], ) diff --git 
a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index 3724dc2822..5b7094e6bc 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -17,7 +17,7 @@ use crate::navigation_target::UpmappingResult; use crate::{ hover::hover_for_definition, inlay_hints::AdjustmentHintsMode, - moniker::{def_to_moniker, MonikerResult}, + moniker::{def_to_kind, def_to_moniker, MonikerResult, SymbolInformationKind}, parent_module::crates_for, Analysis, Fold, HoverConfig, HoverResult, InlayHint, InlayHintsConfig, TryToNav, }; @@ -46,6 +46,10 @@ pub struct TokenStaticData { pub definition: Option, pub references: Vec, pub moniker: Option, + pub display_name: Option, + pub enclosing_moniker: Option, + pub signature: Option, + pub kind: SymbolInformationKind, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -129,6 +133,7 @@ impl StaticIndex<'_> { closure_capture_hints: false, closing_brace_hints_min_lines: Some(25), fields_to_resolve: InlayFieldsToResolve::empty(), + range_exclusive_hints: false, }, file_id, None, @@ -172,6 +177,12 @@ impl StaticIndex<'_> { }), references: vec![], moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)), + display_name: def.name(self.db).map(|name| name.display(self.db).to_string()), + enclosing_moniker: current_crate + .zip(def.enclosing_definition(self.db)) + .and_then(|(cc, enclosing_def)| def_to_moniker(self.db, enclosing_def, cc)), + signature: def.label(self.db), + kind: def_to_kind(self.db, def), }); self.def_map.insert(def, it); it diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index 0558f658fd..d686652bb3 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -1,5 +1,6 @@ //! Computes color for a single element. 
+use either::Either; use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics}; use ide_db::{ defs::{Definition, IdentClass, NameClass, NameRefClass}, @@ -359,7 +360,9 @@ pub(super) fn highlight_def( let db = sema.db; let mut h = match def { Definition::Macro(m) => Highlight::new(HlTag::Symbol(m.kind(sema.db).into())), - Definition::Field(_) => Highlight::new(HlTag::Symbol(SymbolKind::Field)), + Definition::Field(_) | Definition::TupleField(_) => { + Highlight::new(HlTag::Symbol(SymbolKind::Field)) + } Definition::Module(module) => { let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Module)); if module.is_crate_root() { @@ -647,8 +650,11 @@ fn highlight_name_ref_by_syntax( let h = HlTag::Symbol(SymbolKind::Field); let is_union = ast::FieldExpr::cast(parent) .and_then(|field_expr| sema.resolve_field(&field_expr)) - .map_or(false, |field| { - matches!(field.parent_def(sema.db), hir::VariantDef::Union(_)) + .map_or(false, |field| match field { + Either::Left(field) => { + matches!(field.parent_def(sema.db), hir::VariantDef::Union(_)) + } + Either::Right(_) => false, }); if is_union { h | HlMod::Unsafe diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs index 71f4d07245..6bf13ffd06 100644 --- a/crates/ide/src/syntax_highlighting/inject.rs +++ b/crates/ide/src/syntax_highlighting/inject.rs @@ -301,7 +301,7 @@ fn module_def_to_hl_tag(def: Definition) -> HlTag { Definition::TypeAlias(_) => SymbolKind::TypeAlias, Definition::BuiltinType(_) => return HlTag::BuiltinType, Definition::Macro(_) => SymbolKind::Macro, - Definition::Field(_) => SymbolKind::Field, + Definition::Field(_) | Definition::TupleField(_) => SymbolKind::Field, Definition::SelfType(_) => SymbolKind::Impl, Definition::Local(_) => SymbolKind::Local, Definition::GenericParam(gp) => match gp { diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index e8b3a38c9e..4063cf9f75 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -96,7 +96,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd include!(concat!("foo/", "foo.rs")); fn main() { - format_args!("Hello, {}!", 92); + format_args!("Hello, {}!", (92,).0); dont_color_me_braces!(); noop!(noop!(1)); } diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index afb6c555b4..864c6d1cad 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -103,7 +103,7 @@ macro without_args { include!(concat!("foo/", "foo.rs")); fn main() { - format_args!("Hello, {}!", 92); + format_args!("Hello, {}!", (92,).0); dont_color_me_braces!(); noop!(noop!(1)); } diff --git a/crates/ide/src/view_memory_layout.rs b/crates/ide/src/view_memory_layout.rs index 3802978f49..53f998e545 100644 --- a/crates/ide/src/view_memory_layout.rs +++ b/crates/ide/src/view_memory_layout.rs @@ -69,7 +69,7 @@ impl FieldOrTupleIdx { .as_str() .map(|s| s.to_owned()) .unwrap_or_else(|| format!(".{}", f.name(db).as_tuple_index().unwrap())), - FieldOrTupleIdx::TupleIdx(i) => format!(".{i}").to_owned(), + FieldOrTupleIdx::TupleIdx(i) => format!(".{i}"), } } } @@ -203,7 +203,7 @@ pub(crate) fn view_memory_layout( let mut nodes = vec![MemoryLayoutNode { item_name, - typename: typename.clone(), + typename, size: layout.size(), offset: 0, alignment: layout.align(), 
diff --git a/crates/proc-macro-srv/proc-macro-test/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/Cargo.toml index 55be6bc23b..90545bb513 100644 --- a/crates/proc-macro-srv/proc-macro-test/Cargo.toml +++ b/crates/proc-macro-srv/proc-macro-test/Cargo.toml @@ -13,7 +13,7 @@ doctest = false cargo_metadata = "0.18.1" # local deps -toolchain = { path = "../../toolchain", version = "0.0.0" } +toolchain.workspace = true [features] sysroot-abi = [] diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index d89c4598af..bc1fcd08e2 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -274,7 +274,7 @@ impl CargoWorkspace { other_options.append( &mut targets .into_iter() - .flat_map(|target| ["--filter-platform".to_owned().to_string(), target]) + .flat_map(|target| ["--filter-platform".to_owned(), target]) .collect(), ); } diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index fe046dd146..d52e448d74 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -6,7 +6,7 @@ use std::{env, fs, iter, ops, path::PathBuf, process::Command}; -use anyhow::{format_err, Result}; +use anyhow::{format_err, Context, Result}; use base_db::CrateName; use la_arena::{Arena, Idx}; use paths::{AbsPath, AbsPathBuf}; @@ -119,12 +119,15 @@ impl Sysroot { get_rustc_src(&self.root) } - pub fn discover_rustc(&self) -> Result { + pub fn discover_rustc(&self) -> anyhow::Result { let rustc = self.root.join("bin/rustc"); tracing::debug!(?rustc, "checking for rustc binary at location"); match fs::metadata(&rustc) { Ok(_) => Ok(rustc), - Err(e) => Err(e), + Err(e) => Err(e).context(format!( + "failed to discover rustc in sysroot: {:?}", + AsRef::::as_ref(&self.root) + )), } } diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 4057493fa3..00cc7c30ee 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -1277,7 +1277,7 @@ fn add_target_crate_root( inject_cargo_env(pkg, &mut env); if let Ok(cname) = String::from_str(cargo_name) { // CARGO_CRATE_NAME is the name of the Cargo target with - converted to _, such as the name of the library, binary, example, integration test, or benchmark. 
- env.set("CARGO_CRATE_NAME", cname.replace("-", "_")); + env.set("CARGO_CRATE_NAME", cname.replace('-', "_")); } if let Some(envs) = build_data.map(|it| &it.envs) { diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 1908c73b3b..e69302dbac 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -792,6 +792,7 @@ impl flags::AnalysisStats { max_length: Some(25), closing_brace_hints_min_lines: Some(20), fields_to_resolve: InlayFieldsToResolve::empty(), + range_exclusive_hints: true, }, file_id, None, diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index 95c8798d43..c86b2c0ba4 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -7,8 +7,8 @@ use std::{ }; use ide::{ - LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId, - TokenStaticData, + LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile, + SymbolInformationKind, TextRange, TokenId, }; use ide_db::LineIndexDatabase; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; @@ -78,6 +78,7 @@ impl flags::Scip { let mut symbols_emitted: HashSet = HashSet::default(); let mut tokens_to_symbol: HashMap = HashMap::new(); + let mut tokens_to_enclosing_symbol: HashMap> = HashMap::new(); for StaticIndexedFile { file_id, tokens, .. } in si.files { let mut local_count = 0; @@ -109,10 +110,24 @@ impl flags::Scip { let symbol = tokens_to_symbol .entry(id) .or_insert_with(|| { - let symbol = token_to_symbol(token).unwrap_or_else(&mut new_local_symbol); + let symbol = token + .moniker + .as_ref() + .map(moniker_to_symbol) + .unwrap_or_else(&mut new_local_symbol); scip::symbol::format_symbol(symbol) }) .clone(); + let enclosing_symbol = tokens_to_enclosing_symbol + .entry(id) + .or_insert_with(|| { + token + .enclosing_moniker + .as_ref() + .map(moniker_to_symbol) + .map(scip::symbol::format_symbol) + }) + .clone(); let mut symbol_roles = Default::default(); @@ -128,15 +143,22 @@ impl flags::Scip { .map(|hover| hover.markup.as_str()) .filter(|it| !it.is_empty()) .map(|it| vec![it.to_owned()]); + let signature_documentation = + token.signature.clone().map(|text| scip_types::Document { + relative_path: relative_path.clone(), + language: "rust".to_string(), + text, + ..Default::default() + }); let symbol_info = scip_types::SymbolInformation { symbol: symbol.clone(), documentation: documentation.unwrap_or_default(), relationships: Vec::new(), special_fields: Default::default(), - kind: Default::default(), - display_name: String::new(), - signature_documentation: Default::default(), - enclosing_symbol: String::new(), + kind: symbol_kind(token.kind).into(), + display_name: token.display_name.clone().unwrap_or_default(), + signature_documentation: signature_documentation.into(), + enclosing_symbol: enclosing_symbol.unwrap_or_default(), }; symbols.push(symbol_info) @@ -228,14 +250,36 @@ fn new_descriptor(name: &str, suffix: scip_types::descriptor::Suffix) -> scip_ty } } -/// Loosely based on `def_to_moniker` -/// -/// Only returns a Symbol when it's a non-local symbol. 
-/// So if the visibility isn't outside of a document, then it will return None -fn token_to_symbol(token: &TokenStaticData) -> Option { - use scip_types::descriptor::Suffix::*; +fn symbol_kind(kind: SymbolInformationKind) -> scip_types::symbol_information::Kind { + use scip_types::symbol_information::Kind as ScipKind; + match kind { + SymbolInformationKind::AssociatedType => ScipKind::AssociatedType, + SymbolInformationKind::Attribute => ScipKind::Attribute, + SymbolInformationKind::Constant => ScipKind::Constant, + SymbolInformationKind::Enum => ScipKind::Enum, + SymbolInformationKind::EnumMember => ScipKind::EnumMember, + SymbolInformationKind::Field => ScipKind::Field, + SymbolInformationKind::Function => ScipKind::Function, + SymbolInformationKind::Macro => ScipKind::Macro, + SymbolInformationKind::Method => ScipKind::Method, + SymbolInformationKind::Module => ScipKind::Module, + SymbolInformationKind::Parameter => ScipKind::Parameter, + SymbolInformationKind::SelfParameter => ScipKind::SelfParameter, + SymbolInformationKind::StaticMethod => ScipKind::StaticMethod, + SymbolInformationKind::StaticVariable => ScipKind::StaticVariable, + SymbolInformationKind::Struct => ScipKind::Struct, + SymbolInformationKind::Trait => ScipKind::Trait, + SymbolInformationKind::TraitMethod => ScipKind::TraitMethod, + SymbolInformationKind::Type => ScipKind::Type, + SymbolInformationKind::TypeAlias => ScipKind::TypeAlias, + SymbolInformationKind::TypeParameter => ScipKind::TypeParameter, + SymbolInformationKind::Union => ScipKind::Union, + SymbolInformationKind::Variable => ScipKind::Variable, + } +} - let moniker = token.moniker.as_ref()?; +fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol { + use scip_types::descriptor::Suffix::*; let package_name = moniker.package_information.name.clone(); let version = moniker.package_information.version.clone(); @@ -260,7 +304,7 @@ fn token_to_symbol(token: &TokenStaticData) -> Option { }) .collect(); - Some(scip_types::Symbol { + scip_types::Symbol { scheme: "rust-analyzer".into(), package: Some(scip_types::Package { manager: "cargo".to_string(), @@ -271,7 +315,7 @@ fn token_to_symbol(token: &TokenStaticData) -> Option { .into(), descriptors, special_fields: Default::default(), - }) + } } #[cfg(test)] @@ -309,7 +353,7 @@ mod test { for &(range, id) in &file.tokens { if range.contains(offset - TextSize::from(1)) { let token = si.tokens.get(id).unwrap(); - found_symbol = token_to_symbol(token); + found_symbol = token.moniker.as_ref().map(moniker_to_symbol); break; } } @@ -359,6 +403,21 @@ pub mod module { ); } + #[test] + fn symbol_for_trait_alias() { + check_symbol( + r#" +//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library +#![feature(trait_alias)] +pub mod module { + pub trait MyTrait {} + pub trait MyTraitAlias$0 = MyTrait; +} +"#, + "rust-analyzer cargo foo 0.1.0 module/MyTraitAlias#", + ); + } + #[test] fn symbol_for_trait_constant() { check_symbol( @@ -525,4 +584,15 @@ pub mod example_mod { "rust-analyzer cargo main . foo/Bar#", ); } + + #[test] + fn symbol_for_for_type_alias() { + check_symbol( + r#" + //- /lib.rs crate:main + pub type MyTypeAlias$0 = u8; + "#, + "rust-analyzer cargo main . MyTypeAlias#", + ); + } } diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 88fb370844..fe009f82a7 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -399,6 +399,8 @@ config_data! { /// Whether to show function parameter name inlay hints at the call /// site. 
inlayHints_parameterHints_enable: bool = "true", + /// Whether to show exclusive range inlay hints. + inlayHints_rangeExclusiveHints_enable: bool = "false", /// Whether to show inlay hints for compiler inserted reborrows. /// This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#. inlayHints_reborrowHints_enable: ReborrowHintsDef = "\"never\"", @@ -1464,6 +1466,7 @@ impl Config { } else { None }, + range_exclusive_hints: self.data.inlayHints_rangeExclusiveHints_enable, fields_to_resolve: InlayFieldsToResolve { resolve_text_edits: client_capability_fields.contains("textEdits"), resolve_hint_tooltip: client_capability_fields.contains("tooltip"), diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index f1317ce2b4..13544558c5 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -458,7 +458,6 @@ pub(crate) fn handle_workspace_symbol( let config = snap.config.workspace_symbol(); let (all_symbols, libs) = decide_search_scope_and_kind(¶ms, &config); - let limit = config.search_limit; let query = { let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect(); @@ -469,14 +468,11 @@ pub(crate) fn handle_workspace_symbol( if libs { q.libs(); } - q.limit(limit); q }; - let mut res = exec_query(&snap, query)?; + let mut res = exec_query(&snap, query, config.search_limit)?; if res.is_empty() && !all_symbols { - let mut query = Query::new(params.query); - query.limit(limit); - res = exec_query(&snap, query)?; + res = exec_query(&snap, Query::new(params.query), config.search_limit)?; } return Ok(Some(lsp_types::WorkspaceSymbolResponse::Nested(res))); @@ -519,9 +515,10 @@ pub(crate) fn handle_workspace_symbol( fn exec_query( snap: &GlobalStateSnapshot, query: Query, + limit: usize, ) -> anyhow::Result> { let mut res = Vec::new(); - for nav in snap.analysis.symbol_search(query)? { + for nav in snap.analysis.symbol_search(query, limit)? { let container_name = nav.container_name.as_ref().map(|v| v.to_string()); let info = lsp_types::WorkspaceSymbol { diff --git a/crates/span/Cargo.toml b/crates/span/Cargo.toml index 69b88b5a17..a4abba29bb 100644 --- a/crates/span/Cargo.toml +++ b/crates/span/Cargo.toml @@ -1,6 +1,8 @@ [package] name = "span" version = "0.0.0" +description = "TBD" + rust-version.workspace = true edition.workspace = true license.workspace = true @@ -18,4 +20,4 @@ syntax.workspace = true stdx.workspace = true [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index 71e269f74b..43909fff02 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs @@ -59,6 +59,17 @@ impl TupleExt for (T, U) { } } +impl TupleExt for (T, U, V) { + type Head = T; + type Tail = V; + fn head(self) -> Self::Head { + self.0 + } + fn tail(self) -> Self::Tail { + self.2 + } +} + pub fn to_lower_snake_case(s: &str) -> String { to_snake_case(s, char::to_lowercase) } diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs index 1e691befff..cc90d2dd1d 100644 --- a/crates/syntax/src/ast.rs +++ b/crates/syntax/src/ast.rs @@ -136,6 +136,16 @@ where { } +/// Trait to describe operations common to both `RangeExpr` and `RangePat`. 
+pub trait RangeItem { + type Bound; + + fn start(&self) -> Option; + fn end(&self) -> Option; + fn op_kind(&self) -> Option; + fn op_token(&self) -> Option; +} + mod support { use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken}; diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index 4c2878f49f..aa56f10d60 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -293,13 +293,12 @@ impl ast::GenericParamList { } /// Removes the corresponding generic arg - pub fn remove_generic_arg(&self, generic_arg: &ast::GenericArg) -> Option { + pub fn remove_generic_arg(&self, generic_arg: &ast::GenericArg) { let param_to_remove = self.find_generic_arg(generic_arg); if let Some(param) = ¶m_to_remove { self.remove_generic_param(param.clone()); } - param_to_remove } /// Constructs a matching [`ast::GenericArgList`] diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs index 36980b146e..18a56e2823 100644 --- a/crates/syntax/src/ast/expr_ext.rs +++ b/crates/syntax/src/ast/expr_ext.rs @@ -13,6 +13,8 @@ use crate::{ SyntaxNode, SyntaxToken, T, }; +use super::RangeItem; + impl ast::HasAttrs for ast::Expr {} impl ast::Expr { @@ -227,16 +229,12 @@ impl ast::RangeExpr { Some((ix, token, bin_op)) }) } +} - pub fn op_kind(&self) -> Option { - self.op_details().map(|t| t.2) - } +impl RangeItem for ast::RangeExpr { + type Bound = ast::Expr; - pub fn op_token(&self) -> Option { - self.op_details().map(|t| t.1) - } - - pub fn start(&self) -> Option { + fn start(&self) -> Option { let op_ix = self.op_details()?.0; self.syntax() .children_with_tokens() @@ -244,13 +242,21 @@ impl ast::RangeExpr { .find_map(|it| ast::Expr::cast(it.into_node()?)) } - pub fn end(&self) -> Option { + fn end(&self) -> Option { let op_ix = self.op_details()?.0; self.syntax() .children_with_tokens() .skip(op_ix + 1) .find_map(|it| ast::Expr::cast(it.into_node()?)) } + + fn op_token(&self) -> Option { + self.op_details().map(|t| t.1) + } + + fn op_kind(&self) -> Option { + self.op_details().map(|t| t.2) + } } impl ast::IndexExpr { diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 2abbfc81f6..b1dd1fe8c8 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -263,9 +263,6 @@ pub fn impl_( ast_from_text(&format!("impl{gen_params} {path_type}{tr_gen_args}{where_clause}{{{}}}", body)) } -// FIXME : We must make *_gen_args' type ast::GenericArgList but in order to do so we must implement in `edit_in_place.rs` -// `add_generic_arg()` just like `add_generic_param()` -// is implemented for `ast::GenericParamList` pub fn impl_trait( is_unsafe: bool, trait_gen_params: Option, diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index a7e4899fb7..8618018c0b 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs @@ -14,6 +14,8 @@ use crate::{ ted, NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T, }; +use super::{RangeItem, RangeOp}; + impl ast::Lifetime { pub fn text(&self) -> TokenText<'_> { text_of_first_token(self.syntax()) @@ -875,8 +877,10 @@ impl ast::Module { } } -impl ast::RangePat { - pub fn start(&self) -> Option { +impl RangeItem for ast::RangePat { + type Bound = ast::Pat; + + fn start(&self) -> Option { self.syntax() .children_with_tokens() .take_while(|it| !(it.kind() == T![..] 
|| it.kind() == T![..=])) @@ -884,13 +888,37 @@ impl ast::RangePat { .find_map(ast::Pat::cast) } - pub fn end(&self) -> Option { + fn end(&self) -> Option { self.syntax() .children_with_tokens() .skip_while(|it| !(it.kind() == T![..] || it.kind() == T![..=])) .filter_map(|it| it.into_node()) .find_map(ast::Pat::cast) } + + fn op_token(&self) -> Option { + self.syntax().children_with_tokens().find_map(|it| { + let token = it.into_token()?; + + match token.kind() { + T![..] => Some(token), + T![..=] => Some(token), + _ => None, + } + }) + } + + fn op_kind(&self) -> Option { + self.syntax().children_with_tokens().find_map(|it| { + let token = it.into_token()?; + + match token.kind() { + T![..] => Some(RangeOp::Exclusive), + T![..=] => Some(RangeOp::Inclusive), + _ => None, + } + }) + } } impl ast::TokenTree { diff --git a/crates/syntax/src/ast/prec.rs b/crates/syntax/src/ast/prec.rs index 8e04ab8bed..9ddf5a0a98 100644 --- a/crates/syntax/src/ast/prec.rs +++ b/crates/syntax/src/ast/prec.rs @@ -1,7 +1,7 @@ //! Precedence representation. use crate::{ - ast::{self, BinaryOp, Expr, HasArgList}, + ast::{self, BinaryOp, Expr, HasArgList, RangeItem}, match_ast, AstNode, SyntaxNode, }; diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs index 2b1bbac08e..eabbda2c39 100644 --- a/crates/syntax/src/validation.rs +++ b/crates/syntax/src/validation.rs @@ -9,7 +9,7 @@ use rustc_dependencies::lexer::unescape::{self, unescape_literal, Mode}; use crate::{ algo, - ast::{self, HasAttrs, HasVisibility, IsString}, + ast::{self, HasAttrs, HasVisibility, IsString, RangeItem}, match_ast, AstNode, SyntaxError, SyntaxKind::{CONST, FN, INT_NUMBER, TYPE_ALIAS}, SyntaxNode, SyntaxToken, TextSize, T, diff --git a/docs/dev/guide.md b/docs/dev/guide.md index a5f1811bf2..bb77aa0eaa 100644 --- a/docs/dev/guide.md +++ b/docs/dev/guide.md @@ -2,13 +2,15 @@ ## About the guide -This guide describes the current state of rust-analyzer as of 2019-01-20 (git -tag [guide-2019-01]). Its purpose is to document various problems and +This guide describes the current state of rust-analyzer as of the 2024-01-01 release +(git tag [2024-01-01]). Its purpose is to document various problems and architectural solutions related to the problem of building IDE-first compiler -for Rust. There is a video version of this guide as well: +for Rust. There is a video version of this guide as well - +however, it's based on an older 2019-01-20 release (git tag [guide-2019-01]): https://youtu.be/ANKBNiSWyfc. [guide-2019-01]: https://github.com/rust-lang/rust-analyzer/tree/guide-2019-01 +[2024-01-01]: https://github.com/rust-lang/rust-analyzer/tree/2024-01-01 ## The big picture @@ -40,8 +42,8 @@ terms of files and offsets, and **not** in terms of Rust concepts like structs, traits, etc. The "typed" API with Rust specific types is slightly lower in the stack, we'll talk about it later. 
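To make the host/snapshot split concrete, here is a hedged sketch. The method names follow the `ide` crate as described in this guide, but the exact signatures and re-exports are assumptions and shift between releases:

```rust
use ide::AnalysisHost;

fn query_on_worker_thread(host: &AnalysisHost) {
    // Queries never touch the host directly; they go through an immutable
    // `Analysis` snapshot, which can be forked and moved to another thread.
    let analysis = host.analysis();
    std::thread::spawn(move || {
        // Run `analysis.*` queries here (file-and-offset based, as described
        // above); they are cancelled once the host applies the next change.
        drop(analysis);
    });
}
```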
-[`AnalysisHost`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/lib.rs#L265-L284 -[`Analysis`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/lib.rs#L291-L478 +[`AnalysisHost`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide/src/lib.rs#L161-L213 +[`Analysis`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide/src/lib.rs#L220-L761 The reason for this separation of `Analysis` and `AnalysisHost` is that we want to apply changes "uniquely", but we might also want to fork an `Analysis` and send it to @@ -65,18 +67,23 @@ Next, let's talk about what the inputs to the `Analysis` are, precisely. rust-analyzer never does any I/O itself, all inputs get passed explicitly via the `AnalysisHost::apply_change` method, which accepts a single argument, a -`Change`. [`Change`] is a builder for a single change -"transaction", so it suffices to study its methods to understand all of the -input data. +`Change`. [`Change`] is a wrapper for `FileChange` that adds proc-macro knowledge. +[`FileChange`] is a builder for a single change "transaction", so it suffices +to study its methods to understand all the input data. -[`Change`]: https://github.com/rust-lang/rust-analyzer/blob/master/crates/base_db/src/change.rs#L14-L89 +[`Change`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-expand/src/change.rs#L10-L42 +[`FileChange`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/base-db/src/change.rs#L14-L78 -The `(add|change|remove)_file` methods control the set of the input files, where -each file has an integer id (`FileId`, picked by the client), text (`String`) -and a filesystem path. Paths are tricky; they'll be explained below, in source roots -section, together with the `add_root` method. The `add_library` method allows us to add a -group of files which are assumed to rarely change. It's mostly an optimization -and does not change the fundamental picture. +The `change_file` method controls the set of the input files, where each file +has an integer id (`FileId`, picked by the client) and text (`Option>`). +Paths are tricky; they'll be explained below, in source roots section, +together with the `set_roots` method. The "source root" [`is_library`] flag +along with the concept of [`durability`] allows us to add a group of files which +are assumed to rarely change. It's mostly an optimization and does not change +the fundamental picture. + +[`is_library`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/base-db/src/input.rs#L38 +[`durability`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/base-db/src/change.rs#L80-L86 The `set_crate_graph` method allows us to control how the input files are partitioned into compilation units -- crates. It also controls (in theory, not implemented @@ -118,7 +125,7 @@ can have `#[path="/dev/random"] mod foo;`. To solve (or explicitly refuse to solve) these problems rust-analyzer uses the concept of a "source root". Roughly speaking, source roots are the contents of a -directory on a file systems, like `/home/matklad/projects/rustraytracer/**.rs`. +directory on a file system, like `/home/matklad/projects/rustraytracer/**.rs`. More precisely, all files (`FileId`s) are partitioned into disjoint `SourceRoot`s. Each file has a relative UTF-8 path within the `SourceRoot`. @@ -134,49 +141,53 @@ the source root, even `/dev/random`. 
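A hedged sketch of one such change "transaction", using the builder methods named above (`change_file`, `set_roots`, `set_crate_graph`); the concrete argument types (for example `Arc<str>` for file text) and the import paths are assumptions that vary between releases:

```rust
use std::sync::Arc;

use ide::{AnalysisHost, Change, FileId};

fn on_file_edited(host: &mut AnalysisHost, file_id: FileId, new_text: &str) {
    let mut change = Change::default();
    // `Some(text)` sets the new contents of the file; `None` marks it deleted.
    change.change_file(file_id, Some(Arc::from(new_text)));
    // Everything accumulated in `change` becomes visible to queries atomically.
    host.apply_change(change);
}
```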
## Language Server Protocol -Now let's see how the `Analysis` API is exposed via the JSON RPC based language server protocol. The -hard part here is managing changes (which can come either from the file system +Now let's see how the `Analysis` API is exposed via the JSON RPC based language server protocol. +The hard part here is managing changes (which can come either from the file system or from the editor) and concurrency (we want to spawn background jobs for things like syntax highlighting). We use the event loop pattern to manage the zoo, and -the loop is the [`main_loop_inner`] function. The [`main_loop`] does a one-time -initialization and tearing down of the resources. +the loop is the [`GlobalState::run`] function initiated by [`main_loop`] after +[`GlobalState::new`] does a one-time initialization and tearing down of the resources. -[`main_loop`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L51-L110 -[`main_loop_inner`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L156-L258 +[`main_loop`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/main_loop.rs#L31-L54 +[`GlobalState::new`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/global_state.rs#L148-L215 +[`GlobalState::run`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/main_loop.rs#L114-L140 Let's walk through a typical analyzer session! First, we need to figure out what to analyze. To do this, we run `cargo metadata` to learn about Cargo packages for current workspace and dependencies, -and we run `rustc --print sysroot` and scan the "sysroot" (the directory containing the current Rust toolchain's files) to learn about crates like -`std`. Currently we load this configuration once at the start of the server, but -it should be possible to dynamically reconfigure it later without restart. +and we run `rustc --print sysroot` and scan the "sysroot" +(the directory containing the current Rust toolchain's files) to learn about crates +like `std`. This happens in the [`GlobalState::fetch_workspaces`] method. +We load this configuration at the start of the server in [`GlobalState::new`], +but it's also triggered by workspace change events and requests to reload the +workspace from the client. -[main_loop.rs#L62-L70](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L62-L70) +[`GlobalState::fetch_workspaces`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/reload.rs#L186-L257 The [`ProjectModel`] we get after this step is very Cargo and sysroot specific, -it needs to be lowered to get the input in the form of `Change`. This -happens in [`ServerWorldState::new`] method. Specifically +it needs to be lowered to get the input in the form of `Change`. This happens +in [`GlobalState::process_changes`] method. Specifically -* Create a `SourceRoot` for each Cargo package and sysroot. +* Create `SourceRoot`s for each Cargo package(s) and sysroot. * Schedule a filesystem scan of the roots. * Create an analyzer's `Crate` for each Cargo **target** and sysroot crate. * Setup dependencies between the crates. 
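The lowering steps in the list above can be illustrated with a deliberately simplified, hypothetical model; the real code in `project-model` and `GlobalState::process_changes` additionally handles editions, cfg options, build scripts, and the sysroot:

```rust
// Hypothetical, simplified types purely for illustration.
struct CargoTarget {
    root_file: u32,             // file id of the target's crate root (lib.rs / main.rs)
    deps: Vec<usize>,           // indices of the targets this one depends on
}

struct CrateGraph {
    crate_roots: Vec<u32>,      // one analyzer crate per Cargo *target*
    deps: Vec<(usize, usize)>,  // (from, to) dependency edges between crates
}

fn lower(targets: &[CargoTarget]) -> CrateGraph {
    let mut graph = CrateGraph { crate_roots: Vec::new(), deps: Vec::new() };
    for target in targets {
        graph.crate_roots.push(target.root_file);
    }
    // Dependency edges mirror Cargo's resolved dependency graph.
    for (from, target) in targets.iter().enumerate() {
        for &to in &target.deps {
            graph.deps.push((from, to));
        }
    }
    graph
}
```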
-[`ProjectModel`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/project_model.rs#L16-L20 -[`ServerWorldState::new`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L38-L160 +[`ProjectModel`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/project-model/src/workspace.rs#L57-L100 +[`GlobalState::process_changes`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/global_state.rs#L217-L356 The results of the scan (which may take a while) will be processed in the body of the main loop, just like any other change. Here's where we handle: -* [File system changes](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L194) -* [Changes from the editor](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L377) +* [File system changes](https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/main_loop.rs#L273) +* [Changes from the editor](https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/main_loop.rs#L801-L803) After a single loop's turn, we group the changes into one `Change` and [apply] it. This always happens on the main thread and blocks the loop. -[apply]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L216 +[apply]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/global_state.rs#L333 To handle requests, like ["goto definition"], we create an instance of the `Analysis` and [`schedule`] the task (which consumes `Analysis`) on the @@ -186,9 +197,9 @@ executing "goto definition" on the threadpool and a new change comes in, the task will be canceled as soon as the main loop calls `apply_change` on the `AnalysisHost`. -["goto definition"]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L216 -[`schedule`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L426-L455 -[The task]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop/handlers.rs#L205-L223 +["goto definition"]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/main_loop.rs#L767 +[`schedule`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/dispatch.rs#L138 +[The task]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/handlers/request.rs#L610-L623 This concludes the overview of the analyzer's programing *interface*. Next, let's dig into the implementation! @@ -247,16 +258,17 @@ confusing. This illustration by @killercup might help: ## Salsa Input Queries All analyzer information is stored in a salsa database. `Analysis` and -`AnalysisHost` types are newtype wrappers for [`RootDatabase`] -- a salsa -database. +`AnalysisHost` types are essentially newtype wrappers for [`RootDatabase`] +-- a salsa database. -[`RootDatabase`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/db.rs#L88-L134 +[`RootDatabase`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide-db/src/lib.rs#L69-L324 -Salsa input queries are defined in [`FilesDatabase`] (which is a part of -`RootDatabase`). 
They closely mirror the familiar `Change` structure: -indeed, what `apply_change` does is it sets the values of input queries. +Salsa input queries are defined in [`SourceDatabase`] and [`SourceDatabaseExt`] +(which are a part of `RootDatabase`). They closely mirror the familiar `Change` +structure: indeed, what `apply_change` does is it sets the values of input queries. -[`FilesDatabase`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/base_db/src/input.rs#L150-L174 +[`SourceDatabase`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/base-db/src/lib.rs#L58-L65 +[`SourceDatabaseExt`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/base-db/src/lib.rs#L76-L88 ## From text to semantic model @@ -270,18 +282,18 @@ functions: for example, the same source file might get included as a module in several crates or a single crate might be present in the compilation DAG several times, with different sets of `cfg`s enabled. The IDE-specific task of mapping source code into a semantic model is inherently imprecise for -this reason and gets handled by the [`source_binder`]. +this reason and gets handled by the [`source_analyzer`]. -[`source_binder`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/source_binder.rs +[`source_analyzer`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir/src/source_analyzer.rs -The semantic interface is declared in the [`code_model_api`] module. Each entity is +The semantic interface is declared in the [`semantics`] module. Each entity is identified by an integer ID and has a bunch of methods which take a salsa database as an argument and returns other entities (which are also IDs). Internally, these methods invoke various queries on the database to build the model on demand. Here's [the list of queries]. -[`code_model_api`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/code_model_api.rs -[the list of queries]: https://github.com/rust-lang/rust-analyzer/blob/7e84440e25e19529e4ff8a66e521d1b06349c6ec/crates/ra_hir/src/db.rs#L20-L106 +[`semantics`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir/src/semantics.rs +[the list of queries]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-ty/src/db.rs#L29-L275 The first step of building the model is parsing the source code. @@ -327,7 +339,7 @@ The implementation is based on the generic [rowan] crate on top of which a [libsyntax]: https://github.com/apple/swift/tree/5e2c815edfd758f9b1309ce07bfc01c4bc20ec23/lib/Syntax [rowan]: https://github.com/rust-analyzer/rowan/tree/100a36dc820eb393b74abe0d20ddf99077b61f88 -[rust-specific]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_syntax/src/ast/generated.rs +[rust-specific]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/syntax/src/ast/generated.rs The next step in constructing the semantic model is ... @@ -336,9 +348,9 @@ The next step in constructing the semantic model is ... The algorithm for building a tree of modules is to start with a crate root (remember, each `Crate` from a `CrateGraph` has a `FileId`), collect all `mod` declarations and recursively process child modules. This is handled by the -[`module_tree_query`], with two slight variations. +[`crate_def_map_query`], with two slight variations. 
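Since the next few paragraphs lean on the distinction between salsa *input* queries and *derived* queries, here is a self-contained sketch of that machinery. It uses the old-style `salsa::query_group` API (roughly `salsa = "0.16"`); the query names are invented and are not rust-analyzer's real definitions.

```rust
use std::sync::Arc;

#[salsa::query_group(SketchStorage)]
trait SketchDb {
    /// Input query: set by the equivalent of `apply_change`.
    #[salsa::input]
    fn file_text(&self, file_id: u32) -> Arc<String>;

    /// Derived query: the names declared with `mod` in the file. A whitespace
    /// edit re-runs it, but its *result* stays the same, so queries that
    /// depend only on it are not recomputed.
    fn declared_submodules(&self, file_id: u32) -> Arc<Vec<String>>;
}

fn declared_submodules(db: &dyn SketchDb, file_id: u32) -> Arc<Vec<String>> {
    let text = db.file_text(file_id);
    // Toy "parser": the real code walks the syntax tree instead.
    let mods = text
        .lines()
        .filter_map(|line| line.trim().strip_prefix("mod "))
        .map(|rest| rest.trim_end_matches(';').to_string())
        .collect();
    Arc::new(mods)
}

#[salsa::database(SketchStorage)]
#[derive(Default)]
struct Db {
    storage: salsa::Storage<Db>,
}

impl salsa::Database for Db {}

fn main() {
    let mut db = Db::default();
    db.set_file_text(0, Arc::new("mod foo;\nmod bar;\n".to_string()));
    assert_eq!(*db.declared_submodules(0), vec!["foo".to_string(), "bar".to_string()]);
}
```

The toy `declared_submodules` query is the same "stable intermediate result" trick described below for module trees: an edit re-runs the cheap extraction query, but if its output is unchanged, the expensive queries downstream of it are not recomputed.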
-[`module_tree_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/module_tree.rs#L115-L133 +[`crate_def_map_query`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/nameres.rs#L307-L324 First, rust-analyzer builds a module tree for all crates in a source root simultaneously. The main reason for this is historical (`module_tree` predates @@ -347,21 +359,21 @@ part of any crate. That is, if you create a file but do not include it as a submodule anywhere, you still get semantic completion, and you get a warning about a free-floating module (the actual warning is not implemented yet). -The second difference is that `module_tree_query` does not *directly* depend on -the "parse" query (which is confusingly called `source_file`). Why would calling -the parse directly be bad? Suppose the user changes the file slightly, by adding -an insignificant whitespace. Adding whitespace changes the parse tree (because -it includes whitespace), and that means recomputing the whole module tree. +The second difference is that `crate_def_map_query` does not *directly* depend on +the `SourceDatabase::parse` query. Why would calling the parse directly be bad? +Suppose the user changes the file slightly, by adding an insignificant whitespace. +Adding whitespace changes the parse tree (because it includes whitespace), +and that means recomputing the whole module tree. -We deal with this problem by introducing an intermediate [`submodules_query`]. +We deal with this problem by introducing an intermediate [`block_def_map_query`]. This query processes the syntax tree and extracts a set of declared submodule -names. Now, changing the whitespace results in `submodules_query` being +names. Now, changing the whitespace results in `block_def_map_query` being re-executed for a *single* module, but because the result of this query stays -the same, we don't have to re-execute [`module_tree_query`]. In fact, we only +the same, we don't have to re-execute [`crate_def_map_query`]. In fact, we only need to re-execute it when we add/remove new files or when we change mod declarations. -[`submodules_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/module_tree.rs#L41 +[`block_def_map_query`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/nameres.rs#L326-L354 We store the resulting modules in a `Vec`-based indexed arena. The indices in the arena becomes module IDs. And this brings us to the next topic: @@ -383,20 +395,23 @@ change the location. However, such "ID" types ceases to be a `Copy`able integer general can become pretty large if we account for nesting (for example: "third parameter of the `foo` function of the `bar` `impl` in the `baz` module"). -[`LocationInterner`] allows us to combine the benefits of positional and numeric -IDs. It is a bidirectional append-only map between locations and consecutive -integers which can "intern" a location and return an integer ID back. The salsa -database we use includes a couple of [interners]. How to "garbage collect" -unused locations is an open question. +[`Intern` and `Lookup`] traits allows us to combine the benefits of positional and numeric +IDs. Implementing both traits effectively creates a bidirectional append-only map +between locations and integer IDs (typically newtype wrappers for [`salsa::InternId`]) +which can "intern" a location and return an integer ID back. The salsa database we use +includes a couple of [interners]. 
How to "garbage collect" unused locations +is an open question. -[`LocationInterner`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_db/src/loc2id.rs#L65-L71 -[interners]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/db.rs#L22-L23 +[`Intern` and `Lookup`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-expand/src/lib.rs#L96-L106 +[interners]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-expand/src/lib.rs#L108-L122 +[`salsa::InternId`]: https://docs.rs/salsa/0.16.1/salsa/struct.InternId.html -For example, we use `LocationInterner` to assign IDs to definitions of functions, -structs, enums, etc. The location, [`DefLoc`] contains two bits of information: +For example, we use `Intern` and `Lookup` implementations to assign IDs to +definitions of functions, structs, enums, etc. The location, [`ItemLoc`] contains +two bits of information: * the ID of the module which contains the definition, -* the ID of the specific item in the modules source code. +* the ID of the specific item in the module's source code. We "could" use a text offset for the location of a particular item, but that would play badly with salsa: offsets change after edits. So, as a rule of thumb, we avoid @@ -404,7 +419,7 @@ using offsets, text ranges or syntax trees as keys and values for queries. What we do instead is we store "index" of the item among all of the items of a file (so, a positional based ID, but localized to a single file). -[`DefLoc`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/ids.rs#L129-L139 +[`ItemLoc`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/lib.rs#L209-L212 One thing we've glossed over for the time being is support for macros. We have only proof of concept handling of macros at the moment, but they are extremely @@ -424,20 +439,21 @@ enum HirFileId { } ``` -`MacroCallId` is an interned ID that specifies a particular macro invocation. -Its `MacroCallLoc` contains: +`MacroCallId` is an interned ID that identifies a particular macro invocation. +Simplifying, it's a `HirFileId` of a file containing the call plus the offset +of the macro call in the file. -* `ModuleId` of the containing module -* `HirFileId` of the containing file or pseudo file -* an index of this particular macro invocation in this file (positional id - again). - -Note how `HirFileId` is defined in terms of `MacroCallLoc` which is defined in +Note how `HirFileId` is defined in terms of `MacroCallId` which is defined in terms of `HirFileId`! This does not recur infinitely though: any chain of `HirFileId`s bottoms out in `HirFileId::FileId`, that is, some source file actually written by the user. -[`HirFileId`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/ids.rs#L31-L93 +Note also that in the actual implementation, the two variants are encoded in +a single `u32`, which are differentiated by the MSB (most significant bit). +If the MSB is 0, the value represents a `FileId`, otherwise the remaining +31 bits represent a `MacroCallId`. + +[`HirFileId`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/span/src/lib.rs#L148-L160 Now that we understand how to identify a definition, in a source or in a macro-generated file, we can discuss name resolution a bit. @@ -451,13 +467,13 @@ each module into a position-independent representation which does not change if we modify bodies of the items. 
After that we [loop] resolving all imports until we've reached a fixed point. -[lower]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L113-L147 -[loop]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres.rs#L186-L196 +[lower]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/item_tree.rs#L110-L154 +[loop]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/nameres/collector.rs#L404-L437 And, given all our preparation with IDs and a position-independent representation, it is satisfying to [test] that typing inside function body does not invalidate name resolution results. -[test]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/tests.rs#L376 +[test]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/nameres/tests/incremental.rs#L31 An interesting fact about name resolution is that it "erases" all of the intermediate paths from the imports: in the end, we know which items are defined @@ -481,21 +497,18 @@ store the syntax node as a part of name resolution: this will break incrementality, due to the fact that syntax changes after every file modification. -We solve this problem during the lowering step of name resolution. The lowering -query actually produces a *pair* of outputs: `LoweredModule` and [`SourceMap`]. -The `LoweredModule` module contains [imports], but in a position-independent form. -The `SourceMap` contains a mapping from position-independent imports to -(position-dependent) syntax nodes. +We solve this problem during the lowering step of name resolution. Along with +the [`ItemTree`] output, the lowering query additionally produces an [`AstIdMap`] +via an [`ast_id_map`] query. The `ItemTree` contains [imports], but in a +position-independent form based on [`AstId`]. The `AstIdMap` contains a mapping +from position-independent `AstId`s to (position-dependent) syntax nodes. -The result of this basic lowering query changes after every modification. But -there's an intermediate [projection query] which returns only the first -position-independent part of the lowering. The result of this query is stable. -Naturally, name resolution [uses] this stable projection query. +[`ItemTree`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/item_tree.rs +[`AstIdMap`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-expand/src/ast_id_map.rs#L136-L142 +[`ast_id_map`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/item_tree/lower.rs#L32 +[imports]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/item_tree.rs#L559-L563 +[`AstId`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-expand/src/ast_id_map.rs#L29 -[imports]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L52-L59 -[`SourceMap`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L52-L59 -[projection query]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L97-L103 -[uses]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/query_definitions.rs#L49 ## Type inference @@ -517,10 +530,10 @@ construct a mapping from `ExprId`s to types. 
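To picture the "mapping from `ExprId`s to types", here is a toy version with invented types -- the real inference engine is, of course, a proper unification-based algorithm rather than a single pattern match:

```rust
// Toy illustration, not rust-analyzer's real HIR: the body is an
// index-addressed arena of expressions, and inference fills a parallel table.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ExprId(u32);

enum Expr {
    IntLiteral(i64),
    BoolLiteral(bool),
    Add(ExprId, ExprId),
}

#[derive(Clone, PartialEq, Debug)]
enum Ty {
    Int,
    Bool,
}

struct InferenceResult {
    type_of_expr: HashMap<ExprId, Ty>,
}

fn infer(body: &[Expr]) -> InferenceResult {
    let mut type_of_expr = HashMap::new();
    for (idx, expr) in body.iter().enumerate() {
        let ty = match expr {
            // A real engine introduces and unifies type variables here.
            Expr::IntLiteral(_) | Expr::Add(..) => Ty::Int,
            Expr::BoolLiteral(_) => Ty::Bool,
        };
        type_of_expr.insert(ExprId(idx as u32), ty);
    }
    InferenceResult { type_of_expr }
}

fn main() {
    // `1 + 2`: expressions 0 and 1 are the literals, 2 is the addition.
    let body = [Expr::IntLiteral(1), Expr::IntLiteral(2), Expr::Add(ExprId(0), ExprId(1))];
    let result = infer(&body);
    assert_eq!(result.type_of_expr[&ExprId(2)], Ty::Int);
}
```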
[@flodiebold]: https://github.com/flodiebold
[#327]: https://github.com/rust-lang/rust-analyzer/pull/327
-[lower the AST]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/expr.rs
-[positional ID]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/expr.rs#L13-L15
-[a source map]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/expr.rs#L41-L44
-[type inference]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/ty.rs#L1208-L1223
+[lower the AST]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/body.rs
+[positional ID]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/hir.rs#L37
+[a source map]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-def/src/body.rs#L84-L88
+[type inference]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/hir-ty/src/infer.rs#L76-L131
## Tying it all together: completion
@@ -537,36 +550,38 @@ types (by converting a file url into a numeric `FileId`), [ask analysis for completion] and serialize results into the LSP.
The [completion implementation] is finally the place where we start doing the actual
-work. The first step is to collect the `CompletionContext` -- a struct which
+work. The first step is to collect the [`CompletionContext`] -- a struct which
describes the cursor position in terms of Rust syntax and semantics. For
-example, `function_syntax: Option<&'a ast::FnDef>` stores a reference to
-the enclosing function *syntax*, while `function: Option<hir::Function>` is the
-`Def` for this function.
+example, `expected_name: Option<NameOrNameRef>` is the syntactic representation
+for the expected name of what we're completing (usually the parameter name of
+a function argument), while `expected_type: Option<Type>` is the semantic model
+for the expected type of what we're completing.
To construct the context, we first do an ["IntelliJ Trick"]: we insert a dummy identifier at the cursor's position and parse this modified file, to get a reasonably looking syntax tree. Then we do a bunch of "classification" routines
-to figure out the context. For example, we [find an ancestor `fn` node] and we get a
-[semantic model] for it (using the lossy `source_binder` infrastructure).
+to figure out the context. For example, we [find an parent `fn` node], get a
+[semantic model] for it (using the lossy `source_analyzer` infrastructure)
+and use it to determine the [expected type at the cursor position].
The second step is to run a [series of independent completion routines]. Let's take a closer look at [`complete_dot`], which completes fields and methods in
-`foo.bar|`. First we extract a semantic function and a syntactic receiver
-expression out of the `Context`. Then we run type-inference for this single
-function and map our syntactic expression to `ExprId`. Using the ID, we figure
-out the type of the receiver expression. Then we add all fields & methods from
-the type to completion.
+`foo.bar|`. First we extract a semantic receiver type out of the `DotAccess`
+argument. Then, using the semantic model for the type, we determine if the
+receiver implements the `Future` trait, and add a `.await` completion item in
+the affirmative case. Finally, we add all fields & methods from the type to
+completion.
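The "IntelliJ Trick" from a couple of paragraphs above is easy to demonstrate outside rust-analyzer. Here is a minimal sketch using plain string manipulation (the real code patches the file and reparses it with the syntax crates; the marker identifier below is made up):

```rust
// Splice a dummy identifier at the cursor so the tree around it stays
// well-formed when the modified text is reparsed.
fn insert_dummy_ident(text: &str, cursor: usize) -> String {
    const DUMMY: &str = "raDummyIdent"; // any identifier-shaped token will do
    // `cursor` is a byte offset and must fall on a character boundary.
    let mut patched = String::with_capacity(text.len() + DUMMY.len());
    patched.push_str(&text[..cursor]);
    patched.push_str(DUMMY);
    patched.push_str(&text[cursor..]);
    patched
}

fn main() {
    // `foo.` right before the cursor parses poorly; with the dummy identifier
    // spliced in, the tree around the cursor looks like a normal field access.
    assert_eq!(insert_dummy_ident("foo.", 4), "foo.raDummyIdent");
}
```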
-[receiving a message]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L203 -[schedule it on the threadpool]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L428 -[catch]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L436-L442 -[the handler]: https://salsa.zulipchat.com/#narrow/stream/181542-rfcs.2Fsalsa-query-group/topic/design.20next.20steps -[ask analysis for completion]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L439-L444 -[ask analysis for completion]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/lib.rs#L439-L444 -[completion implementation]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion.rs#L46-L62 -[`CompletionContext`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L14-L37 -["IntelliJ Trick"]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L72-L75 -[find an ancestor `fn` node]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L116-L120 -[semantic model]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L123 -[series of independent completion routines]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion.rs#L52-L59 -[`complete_dot`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/complete_dot.rs#L6-L22 +[receiving a message]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/main_loop.rs#L213 +[schedule it on the threadpool]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/dispatch.rs#L197-L211 +[catch]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/dispatch.rs#L292 +[the handler]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/rust-analyzer/src/handlers/request.rs#L850-L876 +[ask analysis for completion]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide/src/lib.rs#L605-L615 +[completion implementation]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide-completion/src/lib.rs#L148-L229 +[`CompletionContext`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide-completion/src/context.rs#L407-L441 +["IntelliJ Trick"]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide-completion/src/context.rs#L644-L648 +[find an parent `fn` node]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide-completion/src/context/analysis.rs#L463 +[semantic model]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide-completion/src/context/analysis.rs#L466 +[expected type at the cursor position]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide-completion/src/context/analysis.rs#L467 +[series of independent completion routines]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide-completion/src/lib.rs#L157-L226 +[`complete_dot`]: https://github.com/rust-lang/rust-analyzer/blob/2024-01-01/crates/ide-completion/src/completions/dot.rs#L11-L41 diff --git a/docs/user/generated_config.adoc 
b/docs/user/generated_config.adoc index c3f249e0ce..ecc90abff1 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -596,6 +596,11 @@ Maximum length for inlay hints. Set to null to have an unlimited length. Whether to show function parameter name inlay hints at the call site. -- +[[rust-analyzer.inlayHints.rangeExclusiveHints.enable]]rust-analyzer.inlayHints.rangeExclusiveHints.enable (default: `false`):: ++ +-- +Whether to show exclusive range inlay hints. +-- [[rust-analyzer.inlayHints.reborrowHints.enable]]rust-analyzer.inlayHints.reborrowHints.enable (default: `"never"`):: + -- diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 9fc19a7d07..fa8413c19a 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -186,18 +186,13 @@ $ pacman -S rust-analyzer ==== Gentoo Linux -`rust-analyzer` is available in the GURU repository: +There are two ways to install `rust-analyzer` under Gentoo: -- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer`] builds from source -- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer-bin?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer-bin`] installs an official binary release +- when installing `dev-lang/rust` or `dev-lang/rust-bin`, enable the `rust-analyzer` and `rust-src` USE flags +- use the `rust-analyzer` component in `rustup` (see instructions above) -If not already, GURU must be enabled (e.g. using `app-eselect/eselect-repository`) and sync'd before running `emerge`: - -[source,bash] ----- -$ eselect repository enable guru && emaint sync -r guru -$ emerge rust-analyzer-bin ----- +Note that in both cases, the version installed lags for a couple of months behind the official releases on GitHub. +To obtain a newer one, you can download a binary from GitHub Releases or building from source. ==== macOS diff --git a/editors/code/package.json b/editors/code/package.json index 27ed8ac502..8307f6833e 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1308,6 +1308,11 @@ "default": true, "type": "boolean" }, + "rust-analyzer.inlayHints.rangeExclusiveHints.enable": { + "markdownDescription": "Whether to show exclusive range inlay hints.", + "default": false, + "type": "boolean" + }, "rust-analyzer.inlayHints.reborrowHints.enable": { "markdownDescription": "Whether to show inlay hints for compiler inserted reborrows.\nThis setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.", "default": "never", diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index 987d936943..51a0aece82 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -5,6 +5,7 @@ import * as vscode from "vscode"; import type { Env } from "./client"; import { log } from "./util"; import { expectNotUndefined, unwrapUndefinable } from "./undefinable"; +import type { JsonProject } from "./rust_project"; export type RunnableEnvCfgItem = { mask?: string; diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index 63ae386c8a..55163241c2 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts @@ -23,6 +23,7 @@ import { execRevealDependency } from "./commands"; import { PersistentState } from "./persistent_state"; import { bootstrap } from "./bootstrap"; import type { RustAnalyzerExtensionApi } from "./main"; +import type { JsonProject } from "./rust_project"; // We only support local folders, not eg. 
Live Share (`vlsl:` scheme), so don't activate if
// only those are in use. We use "Empty" to represent these scenarios
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index 3073353674..599cfb4ff7 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -6,6 +6,7 @@ import { type CommandFactory, Ctx, fetchWorkspace } from "./ctx";
import * as diagnostics from "./diagnostics";
import { activateTaskProvider } from "./tasks";
import { setContextValue } from "./util";
+import type { JsonProject } from "./rust_project";
const RUST_PROJECT_CONTEXT_NAME = "inRustProject";
diff --git a/editors/code/src/rust_project.ts b/editors/code/src/rust_project.ts
index 187a1a96c1..bf65ad43ba 100644
--- a/editors/code/src/rust_project.ts
+++ b/editors/code/src/rust_project.ts
@@ -1,4 +1,4 @@
-interface JsonProject {
+export interface JsonProject {
/// Path to the directory with *source code* of
/// sysroot crates.
///
@@ -21,7 +21,7 @@ interface JsonProject {
crates: Crate[];
}
-interface Crate {
+export interface Crate {
/// Optional crate name used for display purposes,
/// without affecting semantics. See the `deps`
/// key for semantically-significant crate names.
@@ -82,7 +82,7 @@ interface Crate {
proc_macro_dylib_path?: string;
}
-interface Dep {
+export interface Dep {
/// Index of a crate in the `crates` array.
crate: number;
/// Name as should appear in the (implicit)
diff --git a/editors/code/tsconfig.json b/editors/code/tsconfig.json
index c74284a00d..87cfd1b2ee 100644
--- a/editors/code/tsconfig.json
+++ b/editors/code/tsconfig.json
@@ -2,7 +2,7 @@
"extends": "@tsconfig/strictest/tsconfig.json",
"compilerOptions": {
"esModuleInterop": false,
- "module": "CommonJS",
+ "module": "Node16",
"moduleResolution": "Node16",
"target": "ES2021",
"outDir": "out",
diff --git a/lib/line-index/README.md b/lib/line-index/README.md
new file mode 100644
index 0000000000..93ac03696a
--- /dev/null
+++ b/lib/line-index/README.md
@@ -0,0 +1,30 @@
+# line-index
+
+This crate is developed as part of `rust-analyzer`.
+
+line-index is a library to convert between text offsets and corresponding line/column coordinates.
+
+## Installation
+
+To add this crate to a project, simply run `cargo add line-index`.
+
+## Usage
+
+The main structure is `LineIndex`.
+
+It is constructed with a UTF-8 string, but also supports UTF-16 and UTF-32 offsets.
+
+### Example
+
+```rust
+use line_index::{LineCol, LineIndex};
+
+let line_index = LineIndex::new("This is a\nmulti-line\ntext.");
+line_index.line_col(3.into()); // LineCol { line: 0, col: 3 }
+line_index.line_col(13.into()); // LineCol { line: 1, col: 3 }
+line_index.offset(LineCol { line: 2, col: 3 }); // Some(24)
+```
+
+## SemVer
+
+This crate uses [semver](https://semver.org/) versioning.
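As an addendum to the usage section above, here is the same round trip as a complete program (after running `cargo add line-index` in a fresh project):

```rust
use line_index::{LineCol, LineIndex};

fn main() {
    let text = "This is a\nmulti-line\ntext.";
    let index = LineIndex::new(text);

    // Map a byte offset to a line/column pair...
    let pos = index.line_col(13.into());
    println!("offset 13 -> line {}, col {}", pos.line, pos.col); // line 1, col 3

    // ...and map a line/column pair back to a byte offset.
    let offset = index.offset(LineCol { line: 2, col: 3 });
    println!("line 2, col 3 -> {offset:?}"); // Some(24)
}
```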