Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 13:33:31 +00:00

internal: Rewrite ImportMap::search_dependencies

commit 9b3052104c
parent 9279c6566b

2 changed files with 254 additions and 275 deletions
@@ -3,13 +3,13 @@
 use std::{fmt, hash::BuildHasherDefault};
 
 use base_db::CrateId;
-use fst::{self, raw::IndexedValue, Streamer};
+use fst::{self, raw::IndexedValue, Automaton, Streamer};
 use hir_expand::name::Name;
 use indexmap::IndexMap;
 use itertools::Itertools;
 use rustc_hash::{FxHashSet, FxHasher};
 use smallvec::SmallVec;
-use stdx::format_to;
+use stdx::{format_to, TupleExt};
 use triomphe::Arc;
 
 use crate::{
@@ -20,12 +20,10 @@ use crate::{
     AssocItemId, ModuleDefId, ModuleId, TraitId,
 };
 
-type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
-
 /// Item import details stored in the `ImportMap`.
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
 pub struct ImportInfo {
-    /// A name that can be used to import the item, relative to the crate's root.
+    /// A name that can be used to import the item, relative to the container.
     pub name: Name,
     /// The module containing this item.
     pub container: ModuleId,
@@ -35,22 +33,22 @@ pub struct ImportInfo {
     pub is_unstable: bool,
 }
 
-type ImportMapIndex = FxIndexMap<ItemInNs, (SmallVec<[ImportInfo; 1]>, IsTraitAssocItem)>;
-
 /// A map from publicly exported items to its name.
 ///
-/// Reexports of items are taken into account, ie. if something is exported under multiple
-/// names, the one with the shortest import path will be used.
+/// Reexports of items are taken into account.
 #[derive(Default)]
 pub struct ImportMap {
-    map: ImportMapIndex,
-    /// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the
-    /// values returned by running `fst`.
+    /// Maps from `ItemInNs` to information of imports that bring the item into scope.
+    item_to_info_map: ImportMapIndex,
+    /// List of keys stored in [`Self::item_to_info_map`], sorted lexicographically by their
+    /// [`Name`]. Indexed by the values returned by running `fst`.
     ///
-    /// Since a name can refer to multiple items due to namespacing, we store all items with the
-    /// same name right after each other. This allows us to find all items after the FST gives us
-    /// the index of the first one.
-    importables: Vec<ItemInNs>,
+    /// Since a name can refer to multiple items due to namespacing and import aliases, we store all
+    /// items with the same name right after each other. This allows us to find all items after the
+    /// fst gives us the index of the first one.
+    ///
+    /// The [`u32`] is the index into the smallvec in the value of [`Self::item_to_info_map`].
+    importables: Vec<(ItemInNs, u32)>,
     fst: fst::Map<Vec<u8>>,
 }
 
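How the two rewritten fields fit together: the fst maps a lowercased name to an index into `importables`, the run of entries starting there all share that name, and each entry's `u32` picks one `ImportInfo` out of that item's list in `item_to_info_map`. Below is a minimal toy model of that lookup, with `usize` standing in for `ItemInNs`, `String` for `Name`, and a plain `HashMap` for the index map; these are illustrative stand-ins, not the real types.

use std::collections::HashMap;

// Toy stand-ins: `usize` plays the role of `ItemInNs`, `String` the role of `Name`.
struct ToyImportMap {
    // item -> every name the item is importable under (the smallvec of `ImportInfo`s).
    item_to_info_map: HashMap<usize, Vec<String>>,
    // Sorted by lowercased name; the `u32` indexes into the item's name list above.
    importables: Vec<(usize, u32)>,
}

impl ToyImportMap {
    // `fst_value` is what the fst stores for a key: the start of the run of importables
    // sharing that lowercased name. Walk the run until the name changes.
    fn run_for<'a>(&'a self, fst_value: usize, key: &'a str) -> impl Iterator<Item = (usize, &'a str)> + 'a {
        self.importables[fst_value..]
            .iter()
            .map(|&(item, info_idx)| (item, self.item_to_info_map[&item][info_idx as usize].as_str()))
            .take_while(move |&(_, name)| name.eq_ignore_ascii_case(key))
    }
}

fn main() {
    let map = ToyImportMap {
        item_to_info_map: HashMap::from([(1, vec!["Fmt".into()]), (2, vec!["fmt".into(), "fmt_alias".into()])]),
        importables: vec![(1, 0), (2, 0), (2, 1)],
    };
    // The run starting at fst value 0 covers both items named "fmt" (case-insensitively).
    let hits: Vec<_> = map.run_for(0, "fmt").collect();
    assert_eq!(hits, [(1, "Fmt"), (2, "fmt")]);
}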
@@ -60,10 +58,13 @@ enum IsTraitAssocItem {
     No,
 }
 
+type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
+type ImportMapIndex = FxIndexMap<ItemInNs, (SmallVec<[ImportInfo; 1]>, IsTraitAssocItem)>;
+
 impl ImportMap {
     pub fn dump(&self, db: &dyn DefDatabase) -> String {
         let mut out = String::new();
-        for (k, v) in self.map.iter() {
+        for (k, v) in self.item_to_info_map.iter() {
             format_to!(out, "{:?} ({:?}) -> ", k, v.1);
             for v in &v.0 {
                 format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container);
@@ -76,20 +77,18 @@ impl ImportMap {
     pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
         let _p = profile::span("import_map_query");
 
-        let map = collect_import_map(db, krate);
+        let map = Self::collect_import_map(db, krate);
 
         let mut importables: Vec<_> = map
             .iter()
-            // We've only collected items, whose name cannot be tuple field.
-            .flat_map(|(&item, (info, is_assoc))| {
-                info.iter().map(move |info| {
-                    (item, *is_assoc, info.name.as_str().unwrap().to_ascii_lowercase())
+            // We've only collected items, whose name cannot be tuple field so unwrapping is fine.
+            .flat_map(|(&item, (info, _))| {
+                info.iter().enumerate().map(move |(idx, info)| {
+                    (item, info.name.as_str().unwrap().to_ascii_lowercase(), idx as u32)
                 })
             })
             .collect();
-        importables.sort_by(|(_, l_is_assoc, lhs_name), (_, r_is_assoc, rhs_name)| {
-            lhs_name.cmp(rhs_name).then_with(|| l_is_assoc.cmp(r_is_assoc))
-        });
+        importables.sort_by(|(_, lhs_name, _), (_, rhs_name, _)| lhs_name.cmp(rhs_name));
        importables.dedup();
 
         // Build the FST, taking care not to insert duplicate values.
@@ -97,24 +96,23 @@
         let iter = importables
             .iter()
             .enumerate()
-            .dedup_by(|(_, (_, _, lhs)), (_, (_, _, rhs))| lhs == rhs);
-        for (start_idx, (_, _, name)) in iter {
+            .dedup_by(|(_, (_, lhs, _)), (_, (_, rhs, _))| lhs == rhs);
+        for (start_idx, (_, name, _)) in iter {
             let _ = builder.insert(name, start_idx as u64);
         }
 
         Arc::new(ImportMap {
-            map,
+            item_to_info_map: map,
             fst: builder.into_map(),
-            importables: importables.into_iter().map(|(item, _, _)| item).collect(),
+            importables: importables.into_iter().map(|(item, _, idx)| (item, idx)).collect(),
         })
     }
 
     pub fn import_info_for(&self, item: ItemInNs) -> Option<&[ImportInfo]> {
-        self.map.get(&item).map(|(info, _)| &**info)
+        self.item_to_info_map.get(&item).map(|(info, _)| &**info)
     }
-}
 
     fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
         let _p = profile::span("collect_import_map");
 
         let def_map = db.crate_def_map(krate);
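The fst stores only one entry per distinct lowercased name, pointing at the first position of that name's run in `importables`; `Itertools::dedup_by` is what collapses the run before the keys go into the builder. A small sketch of that grouping using made-up names (the real code runs over `(ItemInNs, String, u32)` triples):

use itertools::Itertools;

fn main() {
    // (item, lowercased name, index into that item's ImportInfo list), as built in import_map_query.
    let mut importables = vec![
        ("Fmt", "fmt".to_string(), 0u32),
        ("fmt", "fmt".to_string(), 0),
        ("format", "format".to_string(), 0),
    ];
    importables.sort_by(|(_, lhs, _), (_, rhs, _)| lhs.cmp(rhs));
    importables.dedup();

    // Only the first position of each equal-name run would be inserted into the fst.
    let firsts: Vec<(usize, &str)> = importables
        .iter()
        .enumerate()
        .dedup_by(|(_, (_, lhs, _)), (_, (_, rhs, _))| lhs == rhs)
        .map(|(start_idx, (_, name, _))| (start_idx, name.as_str()))
        .collect();
    assert_eq!(firsts, [(0, "fmt"), (2, "format")]);
}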
@@ -173,7 +171,7 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
         };
 
         if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
-            collect_trait_assoc_items(
+            Self::collect_trait_assoc_items(
                 db,
                 &mut map,
                 tr,
@@ -196,15 +194,15 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
         }
         map.shrink_to_fit();
         map
     }
 
     fn collect_trait_assoc_items(
         db: &dyn DefDatabase,
         map: &mut ImportMapIndex,
         tr: TraitId,
         is_type_in_ns: bool,
         trait_import_info: &ImportInfo,
     ) {
         let _p = profile::span("collect_trait_assoc_items");
         for &(ref assoc_item_name, item) in &db.trait_data(tr).items {
             let module_def_id = match item {
@@ -236,21 +234,21 @@ fn collect_trait_assoc_items(
             infos.reserve_exact(1);
             infos.push(assoc_item_info);
         }
-}
-
-impl PartialEq for ImportMap {
-    fn eq(&self, other: &Self) -> bool {
-        // `fst` and `importables` are built from `map`, so we don't need to compare them.
-        self.map == other.map
     }
 }
 
 impl Eq for ImportMap {}
 
+impl PartialEq for ImportMap {
+    fn eq(&self, other: &Self) -> bool {
+        // `fst` and `importables` are built from `map`, so we don't need to compare them.
+        self.item_to_info_map == other.item_to_info_map
+    }
+}
+
 impl fmt::Debug for ImportMap {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let mut importable_names: Vec<_> = self
-            .map
+            .item_to_info_map
             .iter()
             .map(|(item, (infos, _))| {
                 let l = infos.len();
@@ -348,39 +346,6 @@ impl Query {
             _ => true,
         }
     }
-
-    /// Checks whether the import map entry matches the query.
-    fn import_matches(&self, import: &ImportInfo, enforce_lowercase: bool) -> bool {
-        let _p = profile::span("import_map::Query::import_matches");
-
-        // FIXME: Can we get rid of the alloc here?
-        let input = import.name.to_smol_str();
-        let mut _s_slot;
-        let case_insensitive = enforce_lowercase || !self.case_sensitive;
-        let input = if case_insensitive {
-            _s_slot = String::from(input);
-            _s_slot.make_ascii_lowercase();
-            &*_s_slot
-        } else {
-            &*input
-        };
-
-        let query_string = if case_insensitive { &self.lowercased } else { &self.query };
-
-        match self.search_mode {
-            SearchMode::Exact => input == *query_string,
-            SearchMode::Prefix => input.starts_with(query_string),
-            SearchMode::Fuzzy => {
-                let mut input_chars = input.chars();
-                for query_char in query_string.chars() {
-                    if !input_chars.any(|it| it == query_char) {
-                        return false;
-                    }
-                }
-                true
-            }
-        }
-    }
 }
 
 /// Searches dependencies of `krate` for an importable name matching `query`.
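With `Query::import_matches` removed, the per-candidate name check moves into the new `search_maps` in the next hunk; its fuzzy arm keeps the same "query characters appear in order" semantics, just via `match_indices` instead of a chars cursor. A standalone version of that check, with example inputs of my own:

// Same subsequence test as the removed SearchMode::Fuzzy arm and its replacement in search_maps.
fn fuzzy_matches(mut name: &str, query: &str) -> bool {
    query.chars().all(|query_char| match name.match_indices(query_char).next() {
        Some((index, _)) => {
            // Continue scanning only after the matched character.
            name = &name[index + 1..];
            true
        }
        None => false,
    })
}

fn main() {
    assert!(fuzzy_matches("format_args", "fmt"));
    assert!(!fuzzy_matches("format_args", "tmf"));
}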
@@ -398,69 +363,85 @@ pub fn search_dependencies(
     let import_maps: Vec<_> =
         graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect();
 
-    let automaton = fst::automaton::Subsequence::new(&query.lowercased);
-
     let mut op = fst::map::OpBuilder::new();
-    for map in &import_maps {
-        op = op.add(map.fst.search(&automaton));
-    }
-
-    let mut stream = op.union();
 
+    match query.search_mode {
+        SearchMode::Exact => {
+            let automaton = fst::automaton::Str::new(&query.lowercased);
+
+            for map in &import_maps {
+                op = op.add(map.fst.search(&automaton));
+            }
+            search_maps(&import_maps, op.union(), query)
+        }
+        SearchMode::Fuzzy => {
+            let automaton = fst::automaton::Subsequence::new(&query.lowercased);
+
+            for map in &import_maps {
+                op = op.add(map.fst.search(&automaton));
+            }
+            search_maps(&import_maps, op.union(), query)
+        }
+        SearchMode::Prefix => {
+            let automaton = fst::automaton::Str::new(&query.lowercased).starts_with();
+
+            for map in &import_maps {
+                op = op.add(map.fst.search(&automaton));
+            }
+            search_maps(&import_maps, op.union(), query)
+        }
+    }
+}
+
+fn search_maps(
+    import_maps: &[Arc<ImportMap>],
+    mut stream: fst::map::Union<'_>,
+    query: &Query,
+) -> FxHashSet<ItemInNs> {
     let mut res = FxHashSet::default();
-    let mut common_importable_data_scratch = vec![];
-    // FIXME: Improve this, its rather unreadable and does duplicate amount of work
-    while let Some((_, indexed_values)) = stream.next() {
-        for &IndexedValue { index, value } in indexed_values {
-            let import_map = &import_maps[index];
-            let importables @ [importable, ..] = &import_map.importables[value as usize..] else {
-                continue;
-            };
-            let &(ref importable_data, is_trait_assoc_item) = &import_map.map[importable];
-            if !query.matches_assoc_mode(is_trait_assoc_item) {
-                continue;
-            }
-
-            // Fetch all the known names of this importable item (to handle import aliases/renames)
-            common_importable_data_scratch.extend(
-                importable_data
-                    .iter()
-                    .filter(|&info| query.import_matches(info, true))
-                    // Name shared by the importable items in this group.
-                    .map(|info| info.name.to_smol_str()),
-            );
-            if common_importable_data_scratch.is_empty() {
-                continue;
-            }
-            common_importable_data_scratch.sort();
-            common_importable_data_scratch.dedup();
-
-            let iter =
-                common_importable_data_scratch.drain(..).flat_map(|common_importable_name| {
-                    // Add the items from this name group. Those are all subsequent items in
-                    // `importables` whose name match `common_importable_name`.
-
-                    importables
-                        .iter()
-                        .copied()
-                        .take_while(move |item| {
-                            let &(ref import_infos, assoc_mode) = &import_map.map[item];
-                            query.matches_assoc_mode(assoc_mode)
-                                && import_infos.iter().any(|info| {
-                                    info.name
-                                        .to_smol_str()
-                                        .eq_ignore_ascii_case(&common_importable_name)
-                                })
-                        })
-                        .filter(move |item| {
-                            !query.case_sensitive || {
-                                // we've already checked the common importables name case-insensitively
-                                let &(ref import_infos, _) = &import_map.map[item];
-                                import_infos.iter().any(|info| query.import_matches(info, false))
-                            }
-                        })
-                });
-            res.extend(iter);
+    while let Some((key, indexed_values)) = stream.next() {
+        for &IndexedValue { index: import_map_idx, value } in indexed_values {
+            let import_map = &import_maps[import_map_idx];
+            let importables = &import_map.importables[value as usize..];
+
+            let iter = importables
+                .iter()
+                .copied()
+                .map(|(item, info_idx)| {
+                    let (import_infos, assoc_mode) = &import_map.item_to_info_map[&item];
+                    (item, &import_infos[info_idx as usize], *assoc_mode)
+                })
+                // we put all entries with the same lowercased name in a row, so stop once we find a
+                // different name in the importables
+                .take_while(|&(_, info, _)| {
+                    info.name.to_smol_str().as_bytes().eq_ignore_ascii_case(&key)
+                })
+                .filter(|&(_, info, assoc_mode)| {
+                    if !query.matches_assoc_mode(assoc_mode) {
+                        return false;
+                    }
+                    if !query.case_sensitive {
+                        return true;
+                    }
+                    let name = info.name.to_smol_str();
+                    match query.search_mode {
+                        SearchMode::Exact => name == query.query,
+                        SearchMode::Prefix => name.starts_with(&query.query),
+                        SearchMode::Fuzzy => {
+                            let mut name = &*name;
+                            query.query.chars().all(|query_char| {
+                                match name.match_indices(query_char).next() {
+                                    Some((index, _)) => {
+                                        name = &name[index + 1..];
+                                        true
+                                    }
+                                    None => false,
                                 }
                             })
+                        }
+                    }
                 });
-            res.extend(iter);
+            res.extend(iter.map(TupleExt::head));
 
             if res.len() >= query.limit {
                 return res;
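The three search modes above differ only in which `fst` automaton they feed into the same union of per-crate maps: `Str` for exact matching, `Str::starts_with` for prefix, and `Subsequence` for fuzzy. A self-contained sketch of that machinery with made-up keys and values (the real keys are lowercased importable names and the values are indices into `importables`):

use fst::{automaton::Str, raw::IndexedValue, Automaton, MapBuilder, Streamer};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Two per-crate maps; keys must be inserted in lexicographic order,
    // values point at the first importable carrying that (lowercased) name.
    let mut builder = MapBuilder::memory();
    builder.insert("display", 0u64)?;
    builder.insert("fmt", 3)?;
    let dep_a = builder.into_map();

    let mut builder = MapBuilder::memory();
    builder.insert("fmt_like", 0u64)?;
    let dep_b = builder.into_map();

    // SearchMode::Prefix: match every key starting with the lowercased query.
    let automaton = Str::new("fmt").starts_with();
    let mut op = fst::map::OpBuilder::new();
    for map in [&dep_a, &dep_b] {
        op = op.add(map.search(&automaton));
    }

    // The union yields each matching key once, with one IndexedValue per map containing it:
    // `index` identifies the map (crate) and `value` is the stored u64.
    let mut stream = op.union();
    while let Some((key, indexed_values)) = stream.next() {
        for &IndexedValue { index, value } in indexed_values {
            println!("{} -> map #{index}, importables start {value}", String::from_utf8_lossy(key));
        }
    }
    Ok(())
}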
@@ -484,7 +465,7 @@ mod tests {
     impl ImportMap {
         fn fmt_for_test(&self, db: &dyn DefDatabase) -> String {
             let mut importable_paths: Vec<_> = self
-                .map
+                .item_to_info_map
                 .iter()
                 .flat_map(|(item, (info, _))| info.iter().map(move |info| (item, info)))
                 .map(|(item, info)| {
@@ -911,28 +892,28 @@ mod tests {
     #[test]
     fn search_mode() {
         let ra_fixture = r#"
//- /main.rs crate:main deps:dep
//- /dep.rs crate:dep deps:tdep
use tdep::fmt as fmt_dep;
pub mod fmt {
    pub trait Display {
        fn fmt();
    }
}
#[macro_export]
macro_rules! Fmt {
    () => {};
}
pub struct Fmt;

pub fn format() {}
pub fn no() {}

//- /tdep.rs crate:tdep
pub mod fmt {
    pub struct NotImportableFromMain;
}
"#;

        check_search(
            ra_fixture,
@@ -1000,19 +981,6 @@ mod tests {
                 dep::fmt::Display::fmt (a)
             "#]],
         );
-
-        check_search(
-            ra_fixture,
-            "main",
-            Query::new("fmt".to_string()),
-            expect![[r#"
-                dep::Fmt (m)
-                dep::Fmt (t)
-                dep::Fmt (v)
-                dep::fmt (t)
-                dep::fmt::Display::fmt (a)
-            "#]],
-        );
     }
 
     #[test]
@@ -59,6 +59,17 @@ impl<T, U> TupleExt for (T, U) {
     }
 }
 
+impl<T, U, V> TupleExt for (T, U, V) {
+    type Head = T;
+    type Tail = V;
+    fn head(self) -> Self::Head {
+        self.0
+    }
+    fn tail(self) -> Self::Tail {
+        self.2
+    }
+}
+
 pub fn to_lower_snake_case(s: &str) -> String {
     to_snake_case(s, char::to_lowercase)
 }
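This three-tuple impl is what lets `search_maps` above carry `(item, info, assoc_mode)` triples through the iterator chain and finish with `res.extend(iter.map(TupleExt::head))`. A minimal usage sketch with a trimmed-down local copy of the trait (only `head`) and plain stand-in types instead of the real hir-def ones:

// Trimmed-down local copy of stdx::TupleExt, enough to show the call shape.
trait TupleExt {
    type Head;
    fn head(self) -> Self::Head;
}

impl<T, U, V> TupleExt for (T, U, V) {
    type Head = T;
    fn head(self) -> Self::Head {
        self.0
    }
}

fn main() {
    // Stand-ins for (ItemInNs, &ImportInfo, IsTraitAssocItem) triples.
    let triples = vec![(1u32, "fmt", false), (2, "Display", true)];
    let heads: Vec<u32> = triples.into_iter().map(TupleExt::head).collect();
    assert_eq!(heads, [1, 2]);
}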