Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 05:23:24 +00:00

Merge commit '21b06c1beb9bb59369ffd652f5d617bcf6952e05' into sync-from-ra

Commit e37cf75791 (parent cac74d98f6)

59 changed files with 1080 additions and 477 deletions
.github/workflows/metrics.yaml (vendored, 9 lines changed)
@@ -67,7 +67,7 @@ jobs:
   other_metrics:
     strategy:
       matrix:
-        names: [self, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18]
+        names: [self, rustc_tests, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18]
     runs-on: ubuntu-latest
     needs: [setup_cargo, build_metrics]

@@ -118,6 +118,11 @@ jobs:
        with:
          name: self-${{ github.sha }}

+      - name: Download rustc_tests metrics
+        uses: actions/download-artifact@v3
+        with:
+          name: rustc_tests-${{ github.sha }}
+
      - name: Download ripgrep-13.0.0 metrics
        uses: actions/download-artifact@v3
        with:
@@ -146,7 +151,7 @@ jobs:
          chmod 700 ~/.ssh

          git clone --depth 1 git@github.com:rust-analyzer/metrics.git
-         jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json
+         jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5] * .[6]" build.json self.json rustc_tests.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json
          cd metrics
          git add .
          git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈
Cargo.lock (generated, 11 lines changed)
@@ -684,7 +684,7 @@ dependencies = [
 "indexmap",
 "itertools",
 "limit",
-"line-index 0.1.0-pre.1",
+"line-index 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "memchr",
 "nohash-hasher",
 "once_cell",
@@ -881,9 +881,7 @@ version = "0.0.0"

 [[package]]
 name = "line-index"
-version = "0.1.0-pre.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2cad96769710c1745e11d4f940a8ff36000ade4bbada4285b001cb8aa2f745ce"
+version = "0.1.1"
 dependencies = [
  "nohash-hasher",
  "text-size",
@@ -891,7 +889,9 @@ dependencies = [

 [[package]]
 name = "line-index"
-version = "0.1.0"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67d61795376ae2683928c218fda7d7d7db136fd38c06b7552904667f0d55580a"
 dependencies = [
  "nohash-hasher",
  "text-size",
@@ -1545,6 +1545,7 @@ dependencies = [
 "triomphe",
 "vfs",
 "vfs-notify",
+"walkdir",
 "winapi",
 "xflags",
 "xshell",
@@ -85,7 +85,7 @@ rustc-dependencies = { path = "./crates/rustc-dependencies", version = "0.0.0" }
 proc-macro-test = { path = "./crates/proc-macro-test" }

 # In-tree crates that are published separately and follow semver. See lib/README.md
-line-index = { version = "0.1.0-pre.1" }
+line-index = { version = "0.1.1" }
 la-arena = { version = "0.3.1" }
 lsp-server = { version = "0.7.4" }

@@ -151,21 +151,26 @@ impl fmt::Debug for HirFileIdRepr {

 impl From<FileId> for HirFileId {
     fn from(id: FileId) -> Self {
-        assert!(id.index() < Self::MAX_FILE_ID);
+        _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
+        assert!(id.index() <= Self::MAX_HIR_FILE_ID, "FileId index {} is too large", id.index());
         HirFileId(id.index())
     }
 }

 impl From<MacroFileId> for HirFileId {
     fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
+        _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
         let id = id.as_u32();
-        assert!(id < Self::MAX_FILE_ID);
+        assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {} is too large", id);
         HirFileId(id | Self::MACRO_FILE_TAG_MASK)
     }
 }

 impl HirFileId {
-    const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
+    const ASSERT_MAX_FILE_ID_IS_SAME: () =
+        [()][(Self::MAX_HIR_FILE_ID != FileId::MAX_FILE_ID) as usize];
+
+    const MAX_HIR_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
     const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

     #[inline]
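Note: the replacement constant `ASSERT_MAX_FILE_ID_IS_SAME` uses a compile-time assertion idiom: indexing the one-element array `[()]` with a `bool` cast to `usize`. If the condition is true the index is 1 and constant evaluation fails, so the build breaks as soon as `HirFileId::MAX_HIR_FILE_ID` and `FileId::MAX_FILE_ID` drift apart. A minimal self-contained sketch of the idiom; the `MAX_FILE_ID` value here is an assumption for illustration, not the real definition:

```rust
struct FileId(u32);
impl FileId {
    // Assumed value for illustration; the real constant lives elsewhere.
    const MAX_FILE_ID: u32 = u32::MAX >> 1;
}

struct HirFileId(u32);
impl HirFileId {
    const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
    const MAX_HIR_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
    // Indexing `[()]` with `cond as usize` fails const evaluation when
    // `cond` is true, turning a drifted constant into a build error.
    const ASSERT_MAX_FILE_ID_IS_SAME: () =
        [()][(Self::MAX_HIR_FILE_ID != FileId::MAX_FILE_ID) as usize];
}

fn main() {
    // Associated consts are evaluated lazily; naming one forces the check,
    // which is why the `From` impls above contain `_ = Self::ASSERT_...;`.
    _ = HirFileId::ASSERT_MAX_FILE_ID_IS_SAME;
    println!("{:#x}", HirFileId::MAX_HIR_FILE_ID);
}
```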
@@ -2,7 +2,7 @@
 //!
 //! The actual definitions were copied from rustc's `compiler/rustc_feature/src/builtin_attrs.rs`.
 //!
-//! It was last synchronized with upstream commit e29821ff85a2a3000d226f99f62f89464028d5d6.
+//! It was last synchronized with upstream commit c3def263a44e07e09ae6d57abfc8650227fb4972.
 //!
 //! The macros were adjusted to only expand to the attribute name, since that is all we need to do
 //! name resolution, and `BUILTIN_ATTRIBUTES` is almost entirely unchanged from the original, to
@@ -240,7 +240,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
         template!(List: "address, kcfi, memory, thread"), DuplicatesOk,
         experimental!(no_sanitize)
     ),
-    gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, experimental!(coverage)),
+    gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, coverage_attribute, experimental!(coverage)),

     ungated!(
         doc, Normal, template!(List: "hidden|inline|...", NameValueStr: "string"), DuplicatesOk
@@ -364,7 +364,6 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
         allow_internal_unsafe, Normal, template!(Word), WarnFollowing,
         "allow_internal_unsafe side-steps the unsafe_code lint",
     ),
-    ungated!(rustc_safe_intrinsic, Normal, template!(Word), DuplicatesOk),
     rustc_attr!(rustc_allowed_through_unstable_modules, Normal, template!(Word), WarnFollowing,
         "rustc_allowed_through_unstable_modules special cases accidental stabilizations of stable items \
         through unstable paths"),
@@ -453,6 +452,12 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
         ErrorFollowing,
         INTERNAL_UNSTABLE
     ),
+    rustc_attr!(
+        rustc_confusables, Normal,
+        template!(List: r#""name1", "name2", ..."#),
+        ErrorFollowing,
+        INTERNAL_UNSTABLE,
+    ),
     // Enumerates "identity-like" conversion methods to suggest on type mismatch.
     rustc_attr!(
         rustc_conversion_suggestion, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
@@ -488,6 +493,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
     rustc_attr!(
         rustc_do_not_const_check, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
     ),
+    // Ensure the argument to this function is &&str during const-check.
+    rustc_attr!(
+        rustc_const_panic_str, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
+    ),

     // ==========================================================================
     // Internal attributes, Layout related:
@@ -520,6 +529,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
         rustc_pass_by_value, Normal, template!(Word), ErrorFollowing,
         "#[rustc_pass_by_value] is used to mark types that must be passed by value instead of reference."
     ),
+    rustc_attr!(
+        rustc_never_returns_null_ptr, Normal, template!(Word), ErrorFollowing,
+        "#[rustc_never_returns_null_ptr] is used to mark functions returning non-null pointers."
+    ),
     rustc_attr!(
         rustc_coherence_is_core, AttributeType::CrateLevel, template!(Word), ErrorFollowing, @only_local: true,
         "#![rustc_coherence_is_core] allows inherent methods on builtin types, only intended to be used in `core`."
@@ -533,7 +546,11 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
         "#[rustc_allow_incoherent_impl] has to be added to all impl items of an incoherent inherent impl."
     ),
     rustc_attr!(
-        rustc_deny_explicit_impl, AttributeType::Normal, template!(Word), ErrorFollowing, @only_local: false,
+        rustc_deny_explicit_impl,
+        AttributeType::Normal,
+        template!(List: "implement_via_object = (true|false)"),
+        ErrorFollowing,
+        @only_local: true,
         "#[rustc_deny_explicit_impl] enforces that a trait can have no user-provided impls"
     ),
     rustc_attr!(
@@ -614,6 +631,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
         rustc_doc_primitive, Normal, template!(NameValueStr: "primitive name"), ErrorFollowing,
         r#"`rustc_doc_primitive` is a rustc internal attribute"#,
     ),
+    rustc_attr!(
+        rustc_safe_intrinsic, Normal, template!(Word), WarnFollowing,
+        "the `#[rustc_safe_intrinsic]` attribute is used internally to mark intrinsics as safe"
+    ),

     // ==========================================================================
     // Internal attributes, Testing:
@@ -625,13 +646,16 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
     rustc_attr!(TEST, rustc_insignificant_dtor, Normal, template!(Word), WarnFollowing),
     rustc_attr!(TEST, rustc_strict_coherence, Normal, template!(Word), WarnFollowing),
     rustc_attr!(TEST, rustc_variance, Normal, template!(Word), WarnFollowing),
+    rustc_attr!(TEST, rustc_variance_of_opaques, Normal, template!(Word), WarnFollowing),
+    rustc_attr!(TEST, rustc_hidden_type_of_opaques, Normal, template!(Word), WarnFollowing),
     rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
+    rustc_attr!(TEST, rustc_abi, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
     rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing),
     rustc_attr!(
         TEST, rustc_error, Normal,
         template!(Word, List: "span_delayed_bug_from_inside_query"), WarnFollowingWordOnly
     ),
-    rustc_attr!(TEST, rustc_dump_user_substs, Normal, template!(Word), WarnFollowing),
+    rustc_attr!(TEST, rustc_dump_user_args, Normal, template!(Word), WarnFollowing),
     rustc_attr!(TEST, rustc_evaluate_where_clauses, Normal, template!(Word), WarnFollowing),
     rustc_attr!(
         TEST, rustc_if_this_changed, Normal, template!(Word, List: "DepNode"), DuplicatesOk
@@ -5,8 +5,7 @@
 //! node for a *child*, and get its hir.

 use either::Either;
-use hir_expand::HirFileId;
-use syntax::ast::HasDocComments;
+use hir_expand::{attrs::collect_attrs, HirFileId};

 use crate::{
     db::DefDatabase,
@@ -118,8 +117,8 @@ impl ChildBySource for ItemScope {
             |(ast_id, calls)| {
                 let adt = ast_id.to_node(db.upcast());
                 calls.for_each(|(attr_id, call_id, calls)| {
-                    if let Some(Either::Left(attr)) =
-                        adt.doc_comments_and_attrs().nth(attr_id.ast_index())
+                    if let Some((_, Either::Left(attr))) =
+                        collect_attrs(&adt).nth(attr_id.ast_index())
                     {
                         res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
                     }
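Note: this and several later hunks replace `doc_comments_and_attrs()` with `collect_attrs`, which evidently yields an index alongside each attribute-or-doc-comment, hence the new `(_, Either::Left(attr))` pattern and the `.1` / `|x| Either::left(x.1)` projections elsewhere. A hedged, std-only model of that assumed shape (a simplification, not the actual rust-analyzer signature):

```rust
// Simplified stand-in for the `either` crate's type.
enum Either<L, R> {
    Left(L),
    Right(R),
}

struct Attr(String);
struct DocComment(String);

// Model: one unified, indexed stream over attributes and doc comments, so a
// single `AttrId` index can address either kind.
fn collect_attrs(
    items: Vec<Either<Attr, DocComment>>,
) -> impl Iterator<Item = (usize, Either<Attr, DocComment>)> {
    items.into_iter().enumerate()
}

fn main() {
    let items = vec![
        Either::Right(DocComment("/// docs".into())),
        Either::Left(Attr("#[derive(Debug)]".into())),
    ];
    // The callers in the diff do exactly this: take the nth entry and keep
    // only real attributes, discarding doc comments.
    let attr = collect_attrs(items).nth(1).and_then(|(_, it)| match it {
        Either::Left(attr) => Some(attr),
        Either::Right(_) => None,
    });
    assert!(attr.is_some());
}
```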
@@ -222,11 +222,10 @@ impl GenericParams {
                 let module = loc.container.module(db);
                 let func_data = db.function_data(id);

-                // Don't create an `Expander` nor call `loc.source(db)` if not needed since this
-                // causes a reparse after the `ItemTree` has been created.
-                let mut expander = Lazy::new(|| {
-                    (module.def_map(db), Expander::new(db, loc.source(db).file_id, module))
-                });
+                // Don't create an `Expander` if not needed since this
+                // could cause a reparse after the `ItemTree` has been created due to the spanmap.
+                let mut expander =
+                    Lazy::new(|| (module.def_map(db), Expander::new(db, loc.id.file_id(), module)));
                 for param in func_data.params.iter() {
                     generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
                 }
@@ -9,6 +9,7 @@ use indexmap::IndexMap;
 use itertools::Itertools;
 use rustc_hash::{FxHashSet, FxHasher};
 use smallvec::SmallVec;
+use stdx::format_to;
 use triomphe::Arc;

 use crate::{
@@ -53,13 +54,25 @@ pub struct ImportMap {
     fst: fst::Map<Vec<u8>>,
 }

-#[derive(Copy, Clone, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
 enum IsTraitAssocItem {
     Yes,
     No,
 }

 impl ImportMap {
+    pub fn dump(&self, db: &dyn DefDatabase) -> String {
+        let mut out = String::new();
+        for (k, v) in self.map.iter() {
+            format_to!(out, "{:?} ({:?}) -> ", k, v.1);
+            for v in &v.0 {
+                format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container);
+            }
+            format_to!(out, "\n");
+        }
+        out
+    }
+
     pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
         let _p = profile::span("import_map_query");

@@ -68,26 +81,31 @@ impl ImportMap {
         let mut importables: Vec<_> = map
             .iter()
             // We've only collected items, whose name cannot be tuple field.
-            .flat_map(|(&item, (info, _))| {
-                info.iter()
-                    .map(move |info| (item, info.name.as_str().unwrap().to_ascii_lowercase()))
+            .flat_map(|(&item, (info, is_assoc))| {
+                info.iter().map(move |info| {
+                    (item, *is_assoc, info.name.as_str().unwrap().to_ascii_lowercase())
+                })
             })
             .collect();
-        importables.sort_by(|(_, lhs_name), (_, rhs_name)| lhs_name.cmp(rhs_name));
+        importables.sort_by(|(_, l_is_assoc, lhs_name), (_, r_is_assoc, rhs_name)| {
+            lhs_name.cmp(rhs_name).then_with(|| l_is_assoc.cmp(r_is_assoc))
+        });
         importables.dedup();

         // Build the FST, taking care not to insert duplicate values.
         let mut builder = fst::MapBuilder::memory();
-        let iter =
-            importables.iter().enumerate().dedup_by(|(_, (_, lhs)), (_, (_, rhs))| lhs == rhs);
-        for (start_idx, (_, name)) in iter {
+        let iter = importables
+            .iter()
+            .enumerate()
+            .dedup_by(|(_, (_, _, lhs)), (_, (_, _, rhs))| lhs == rhs);
+        for (start_idx, (_, _, name)) in iter {
             let _ = builder.insert(name, start_idx as u64);
         }

         Arc::new(ImportMap {
             map,
             fst: builder.into_map(),
-            importables: importables.into_iter().map(|(item, _)| item).collect(),
+            importables: importables.into_iter().map(|(item, _, _)| item).collect(),
         })
     }

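Note: the FST maps each distinct lowercased name to the position of its first occurrence in the sorted `importables` list, which is why the code sorts by name (now breaking ties on the assoc-item flag), dedups, and then `dedup_by`s on the name alone before inserting. A std-only sketch of that first-occurrence indexing, with plain tuples standing in for the real item data (the real code stores the positions in an `fst::Map`):

```rust
// Build (name -> first index) over a sorted, deduplicated entry list; a
// lookup can then scan forward from that index while the name matches.
fn build_index(mut entries: Vec<(u32, bool, String)>) -> Vec<(String, usize)> {
    entries.sort_by(|(_, l_assoc, l), (_, r_assoc, r)| {
        l.cmp(r).then_with(|| l_assoc.cmp(r_assoc))
    });
    entries.dedup();
    let mut index = Vec::new();
    let mut last: Option<&str> = None;
    for (i, (_, _, name)) in entries.iter().enumerate() {
        if last != Some(name.as_str()) {
            index.push((name.clone(), i)); // first entry of this name group
            last = Some(name.as_str());
        }
    }
    index
}

fn main() {
    let idx = build_index(vec![
        (7, false, "map".into()),
        (3, false, "filter".into()),
        (9, true, "map".into()),
    ]);
    assert_eq!(idx, vec![("filter".to_string(), 0), ("map".to_string(), 1)]);
    println!("{idx:?}");
}
```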
@@ -328,20 +346,20 @@ impl Query {
     }

     /// Checks whether the import map entry matches the query.
-    fn import_matches(
-        &self,
-        db: &dyn DefDatabase,
-        import: &ImportInfo,
-        enforce_lowercase: bool,
-    ) -> bool {
+    fn import_matches(&self, import: &ImportInfo, enforce_lowercase: bool) -> bool {
         let _p = profile::span("import_map::Query::import_matches");

         // FIXME: Can we get rid of the alloc here?
-        let mut input = import.name.display(db.upcast()).to_string();
+        let input = import.name.to_smol_str();
+        let mut _s_slot;
         let case_insensitive = enforce_lowercase || !self.case_sensitive;
-        if case_insensitive {
-            input.make_ascii_lowercase();
-        }
+        let input = if case_insensitive {
+            _s_slot = String::from(input);
+            _s_slot.make_ascii_lowercase();
+            &*_s_slot
+        } else {
+            &*input
+        };

         let query_string = if case_insensitive { &self.lowercased } else { &self.query };

@@ -351,7 +369,7 @@ impl Query {
             SearchMode::Fuzzy => {
                 let mut input_chars = input.chars();
                 for query_char in query_string.chars() {
-                    if input_chars.find(|&it| it == query_char).is_none() {
+                    if !input_chars.any(|it| it == query_char) {
                         return false;
                     }
                 }
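Note: the rewrite from `input_chars.find(...).is_none()` to `!input_chars.any(...)` keeps the same subsequence semantics (both advance the iterator past the match) in a more idiomatic form. A minimal standalone sketch of this subsequence-style fuzzy check:

```rust
// Every query character must appear in the input, in order: `any` consumes
// the iterator up to and including the match, so repeated calls naturally
// enforce ordering.
fn fuzzy_matches(input: &str, query: &str) -> bool {
    let mut input_chars = input.chars();
    query.chars().all(|qc| input_chars.any(|ic| ic == qc))
}

fn main() {
    assert!(fuzzy_matches("hash_map", "hmp"));
    assert!(!fuzzy_matches("hash_map", "pm")); // out of order
    println!("ok");
}
```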
@@ -372,6 +390,7 @@ pub fn search_dependencies(
     let _p = profile::span("search_dependencies").detail(|| format!("{query:?}"));

     let graph = db.crate_graph();

     let import_maps: Vec<_> =
         graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect();

@@ -386,22 +405,23 @@ pub fn search_dependencies(

     let mut res = FxHashSet::default();
     let mut common_importable_data_scratch = vec![];
+    // FIXME: Improve this, its rather unreadable and does duplicate amount of work
     while let Some((_, indexed_values)) = stream.next() {
         for &IndexedValue { index, value } in indexed_values {
             let import_map = &import_maps[index];
             let importables @ [importable, ..] = &import_map.importables[value as usize..] else {
                 continue;
             };

             let &(ref importable_data, is_trait_assoc_item) = &import_map.map[importable];
             if !query.matches_assoc_mode(is_trait_assoc_item) {
                 continue;
             }
-
+            // Fetch all the known names of this importable item (to handle import aliases/renames)
             common_importable_data_scratch.extend(
                 importable_data
                     .iter()
-                    .filter(|&info| query.import_matches(db, info, true))
+                    .filter(|&info| query.import_matches(info, true))
                     // Name shared by the importable items in this group.
                     .map(|info| info.name.to_smol_str()),
             );
@@ -415,6 +435,7 @@ pub fn search_dependencies(
             common_importable_data_scratch.drain(..).flat_map(|common_importable_name| {
                 // Add the items from this name group. Those are all subsequent items in
                 // `importables` whose name match `common_importable_name`.
+
                 importables
                     .iter()
                     .copied()
@@ -430,11 +451,8 @@ pub fn search_dependencies(
                     .filter(move |item| {
                         !query.case_sensitive || {
                             // we've already checked the common importables name case-insensitively
-                            let &(ref import_infos, assoc_mode) = &import_map.map[item];
-                            query.matches_assoc_mode(assoc_mode)
-                                && import_infos
-                                    .iter()
-                                    .any(|info| query.import_matches(db, info, false))
+                            let &(ref import_infos, _) = &import_map.map[item];
+                            import_infos.iter().any(|info| query.import_matches(info, false))
                         }
                     })
             });
@@ -106,11 +106,6 @@ impl ItemTree {
     pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
         let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}"));
         let syntax = db.parse_or_expand(file_id);
-        if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax)
-        {
-            // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
-            return Default::default();
-        }

         let ctx = lower::Ctx::new(db, file_id);
         let mut top_attrs = None;
@@ -129,6 +124,9 @@ impl ItemTree {
                 ctx.lower_macro_stmts(stmts)
             },
             _ => {
+                if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax) {
+                    return Default::default();
+                }
                 panic!("cannot create item tree for file {file_id:?} from {syntax:?} {syntax}");
             },
         }
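Note: `never!` is rust-analyzer's soft assertion (from its `stdx`/`always-assert` layer): it evaluates to the condition so release builds can take the recovery path that follows, while debug builds panic. A hedged, std-only approximation of the pattern; the real macro also logs and has more forms:

```rust
// Approximation: panic under debug assertions, otherwise just report
// whether the "impossible" condition held so the caller can recover.
macro_rules! never {
    ($cond:expr) => {{
        let cond = $cond;
        if cond {
            debug_assert!(false, "assertion failed: {}", stringify!($cond));
        }
        cond
    }};
}

fn main() {
    let syntax_is_error = false;
    if never!(syntax_is_error) {
        // Recovery path, mirroring `return Default::default();` above.
        return;
    }
    println!("ok");
}
```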
@@ -569,6 +569,8 @@ pub struct ConstBlockLoc {
     pub root: hir::ExprId,
 }

+/// Something that holds types, required for the current const arg lowering implementation as they
+/// need to be able to query where they are defined.
 #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
 pub enum TypeOwnerId {
     FunctionId(FunctionId),
@@ -581,9 +583,6 @@ pub enum TypeOwnerId {
     TypeAliasId(TypeAliasId),
     ImplId(ImplId),
     EnumVariantId(EnumVariantId),
-    // FIXME(const-generic-body): ModuleId should not be a type owner. This needs to be fixed to make `TypeOwnerId` actually
-    // useful for assigning ids to in type consts.
-    ModuleId(ModuleId),
 }

 impl TypeOwnerId {
@@ -597,9 +596,7 @@ impl TypeOwnerId {
             TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
             TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it),
             TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it),
-            TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => {
-                return None
-            }
+            TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None,
         })
     }
 }
@@ -614,8 +611,7 @@ impl_from!(
     TraitAliasId,
     TypeAliasId,
     ImplId,
-    EnumVariantId,
-    ModuleId
+    EnumVariantId
     for TypeOwnerId
 );

@@ -713,12 +709,15 @@ pub struct InTypeConstLoc {
     pub id: AstId<ast::ConstArg>,
     /// The thing this const arg appears in
     pub owner: TypeOwnerId,
-    pub thing: Box<dyn OpaqueInternableThing>,
+    // FIXME(const-generic-body): The expected type should not be
+    pub expected_ty: Box<dyn OpaqueInternableThing>,
 }

 impl PartialEq for InTypeConstLoc {
     fn eq(&self, other: &Self) -> bool {
-        self.id == other.id && self.owner == other.owner && &*self.thing == &*other.thing
+        self.id == other.id
+            && self.owner == other.owner
+            && &*self.expected_ty == &*other.expected_ty
     }
 }

@@ -1041,7 +1040,6 @@ impl HasModule for TypeOwnerId {
             TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db),
             TypeOwnerId::ImplId(it) => it.lookup(db).container,
             TypeOwnerId::EnumVariantId(it) => it.parent.lookup(db).container,
-            TypeOwnerId::ModuleId(it) => *it,
         }
     }
 }
@@ -13,37 +13,97 @@ fn test_vec() {
     check(
         r#"
 macro_rules! vec {
-    ($($item:expr),*) => {{
-        let mut v = Vec::new();
-        $( v.push($item); )*
-        v
-    }};
+    () => (
+        $crate::__rust_force_expr!($crate::vec::Vec::new())
+    );
+    ($elem:expr; $n:expr) => (
+        $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n))
+    );
+    ($($x:expr),+ $(,)?) => (
+        $crate::__rust_force_expr!(<[_]>::into_vec(
+            // This rustc_box is not required, but it produces a dramatic improvement in compile
+            // time when constructing arrays with many elements.
+            #[rustc_box]
+            $crate::boxed::Box::new([$($x),+])
+        ))
+    );
+}
+
+macro_rules! __rust_force_expr {
+    ($e:expr) => {
+        $e
+    };
 }

 fn main() {
     vec!();
     vec![1u32,2];
+    vec![a.];
 }
 "#,
         expect![[r#"
 macro_rules! vec {
-    ($($item:expr),*) => {{
-        let mut v = Vec::new();
-        $( v.push($item); )*
-        v
-    }};
+    () => (
+        $crate::__rust_force_expr!($crate::vec::Vec::new())
+    );
+    ($elem:expr; $n:expr) => (
+        $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n))
+    );
+    ($($x:expr),+ $(,)?) => (
+        $crate::__rust_force_expr!(<[_]>::into_vec(
+            // This rustc_box is not required, but it produces a dramatic improvement in compile
+            // time when constructing arrays with many elements.
+            #[rustc_box]
+            $crate::boxed::Box::new([$($x),+])
+        ))
+    );
+}
+
+macro_rules! __rust_force_expr {
+    ($e:expr) => {
+        $e
+    };
 }

 fn main() {
-    {
-        let mut v = Vec::new();
-        v
-    };
-    {
-        let mut v = Vec::new();
-        v.push(1u32);
-        v.push(2);
-        v
-    };
+    $crate::__rust_force_expr!($crate:: vec:: Vec:: new());
+    $crate::__rust_force_expr!(<[_]>:: into_vec(#[rustc_box]$crate:: boxed:: Box:: new([1u32, 2])));
+    /* error: expected Expr */$crate::__rust_force_expr!($crate:: vec:: from_elem((a.), $n));
 }
+"#]],
+    );
+    // FIXME we should ahev testing infra for multi level expansion tests
+    check(
+        r#"
+macro_rules! __rust_force_expr {
+    ($e:expr) => {
+        $e
+    };
+}
+
+fn main() {
+    __rust_force_expr!(crate:: vec:: Vec:: new());
+    __rust_force_expr!(<[_]>:: into_vec(#[rustc_box] crate:: boxed:: Box:: new([1u32, 2])));
+    __rust_force_expr/*+errors*/!(crate:: vec:: from_elem((a.), $n));
+}
+"#,
+        expect![[r#"
+macro_rules! __rust_force_expr {
+    ($e:expr) => {
+        $e
+    };
+}
+
+fn main() {
+    (crate ::vec::Vec::new());
+    (<[_]>::into_vec(#[rustc_box] crate ::boxed::Box::new([1u32, 2])));
+    /* error: expected Expr *//* parse error: expected field name or number */
+    /* parse error: expected expression */
+    /* parse error: expected R_PAREN */
+    /* parse error: expected COMMA */
+    /* parse error: expected expression, item or let statement */
+    (crate ::vec::from_elem((a.), $n));
+}
 "#]],
     );
 }
@@ -589,6 +589,16 @@ impl Resolver {
         })
     }

+    pub fn type_owner(&self) -> Option<TypeOwnerId> {
+        self.scopes().find_map(|scope| match scope {
+            Scope::BlockScope(_) => None,
+            &Scope::GenericParams { def, .. } => Some(def.into()),
+            &Scope::ImplDefScope(id) => Some(id.into()),
+            &Scope::AdtScope(adt) => Some(adt.into()),
+            Scope::ExprScope(it) => Some(it.owner.into()),
+        })
+    }
+
     pub fn impl_def(&self) -> Option<ImplId> {
         self.scopes().find_map(|scope| match scope {
             Scope::ImplDefScope(def) => Some(*def),
@@ -1079,7 +1089,6 @@ impl HasResolver for TypeOwnerId {
             TypeOwnerId::TypeAliasId(it) => it.resolver(db),
             TypeOwnerId::ImplId(it) => it.resolver(db),
             TypeOwnerId::EnumVariantId(it) => it.resolver(db),
-            TypeOwnerId::ModuleId(it) => it.resolver(db),
         }
     }
 }
@@ -10,17 +10,17 @@ use limit::Limit;
 use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
 use syntax::{
-    ast::{self, HasAttrs, HasDocComments},
+    ast::{self, HasAttrs},
     AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
 };
 use triomphe::Arc;

 use crate::{
     ast_id_map::AstIdMap,
-    attrs::RawAttrs,
+    attrs::{collect_attrs, RawAttrs},
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
-    fixup::{self, SyntaxFixupUndoInfo},
+    fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
     hygiene::{apply_mark, SyntaxContextData, Transparency},
     span::{RealSpanMap, SpanMap, SpanMapRef},
     tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
@@ -216,9 +216,9 @@ pub fn expand_speculative(
             // Attributes may have an input token tree, build the subtree and map for this as well
             // then try finding a token id for our token if it is inside this input subtree.
             let item = ast::Item::cast(speculative_args.clone())?;
-            item.doc_comments_and_attrs()
+            collect_attrs(&item)
                 .nth(invoc_attr_index.ast_index())
-                .and_then(Either::left)
+                .and_then(|x| Either::left(x.1))
         }?;
         match attr.token_tree() {
             Some(token_tree) => {
@@ -421,6 +421,15 @@ fn macro_arg(
                 syntax::NodeOrToken::Token(_) => true,
             });
             fixups.remove.extend(censor);
+            {
+                let mut tt = mbe::syntax_node_to_token_tree_modified(
+                    &syntax,
+                    map.as_ref(),
+                    fixups.append.clone(),
+                    fixups.remove.clone(),
+                );
+                reverse_fixups(&mut tt, &fixups.undo_info);
+            }
             (
                 mbe::syntax_node_to_token_tree_modified(
                     &syntax,
|
||||||
MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
|
MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
|
||||||
MacroCallKind::Attr { invoc_attr_index, .. } => {
|
MacroCallKind::Attr { invoc_attr_index, .. } => {
|
||||||
cov_mark::hit!(attribute_macro_attr_censoring);
|
cov_mark::hit!(attribute_macro_attr_censoring);
|
||||||
ast::Item::cast(node.clone())?
|
collect_attrs(&ast::Item::cast(node.clone())?)
|
||||||
.doc_comments_and_attrs()
|
|
||||||
.nth(invoc_attr_index.ast_index())
|
.nth(invoc_attr_index.ast_index())
|
||||||
.and_then(Either::left)
|
.and_then(|x| Either::left(x.1))
|
||||||
.map(|attr| attr.syntax().clone())
|
.map(|attr| attr.syntax().clone())
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.collect()
|
.collect()
|
||||||
|
|
|
@@ -88,7 +88,7 @@ pub fn expand_eager_macro_input(
     let loc = MacroCallLoc {
         def,
         krate,
-        eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
+        eager: Some(Arc::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
         call_site,
     };
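Note: switching the `eager` payload from `Box` to `Arc` makes cloning a `MacroCallLoc` a reference-count bump instead of a deep copy of the eager argument subtree. A minimal illustration with stand-in types (not the rust-analyzer definitions):

```rust
use std::sync::Arc;

#[derive(Clone)]
struct EagerCallInfo {
    arg: Vec<u32>, // stand-in for the interned token subtree
}

#[derive(Clone)]
struct MacroCallLoc {
    eager: Option<Arc<EagerCallInfo>>,
}

fn main() {
    let loc =
        MacroCallLoc { eager: Some(Arc::new(EagerCallInfo { arg: vec![1, 2, 3] })) };
    // Cloning the location shares the EagerCallInfo allocation; with
    // `Box` this clone would duplicate the whole payload.
    let copy = loc.clone();
    assert!(Arc::ptr_eq(loc.eager.as_ref().unwrap(), copy.eager.as_ref().unwrap()));
    println!("shared");
}
```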
@@ -8,12 +8,13 @@ use base_db::{
 use la_arena::RawIdx;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
+use stdx::never;
 use syntax::{
     ast::{self, AstNode, HasLoopBody},
     match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
 };
 use triomphe::Arc;
-use tt::Spacing;
+use tt::{Spacing, Span};

 use crate::{
     span::SpanMapRef,
@@ -45,19 +46,20 @@ impl SyntaxFixupUndoInfo {
 // replacement -> censor + append
 // append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
 // to remove later
+const FIXUP_DUMMY_FILE: FileId = FileId::from_raw(FileId::MAX_FILE_ID);
+const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::from_u32(!0));
+const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
+const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);

 pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
     let mut append = FxHashMap::<SyntaxElement, _>::default();
     let mut remove = FxHashSet::<SyntaxNode>::default();
     let mut preorder = node.preorder();
     let mut original = Vec::new();
-    let dummy_range = TextRange::empty(TextSize::new(0));
+    let dummy_range = FIXUP_DUMMY_RANGE;
     // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
     // the index into the replacement vec but only if the end points to !0
-    let dummy_anchor = SpanAnchor {
-        file_id: FileId::from_raw(!0),
-        ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)),
-    };
+    let dummy_anchor = SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID };
     let fake_span = |range| SpanData {
         range: dummy_range,
         anchor: dummy_anchor,
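Note: the new constants name the sentinel encoding the fixup machinery relies on: a span anchored at the dummy file id marks a synthetic token, and a range end of `!0` marks a token that replaced an original node, whose range start is really an index into the undo table. A simplified sketch of that encoding under those assumptions (field layout is illustrative):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
struct Span {
    file_id: u32,
    start: u32,
    end: u32,
}

const FIXUP_DUMMY_FILE: u32 = u32::MAX;
const FIXUP_DUMMY_RANGE_END: u32 = u32::MAX;

// A token the fixup pass conjured out of thin air: nothing to restore.
fn dummy_span() -> Span {
    Span { file_id: FIXUP_DUMMY_FILE, start: 0, end: 0 }
}

// A token standing in for an original node: `start` indexes the undo table.
fn replacement_span(undo_index: u32) -> Span {
    Span { file_id: FIXUP_DUMMY_FILE, start: undo_index, end: FIXUP_DUMMY_RANGE_END }
}

fn undo<'a>(span: Span, undo_table: &'a [&'a str]) -> Option<&'a str> {
    if span.file_id == FIXUP_DUMMY_FILE && span.end == FIXUP_DUMMY_RANGE_END {
        return undo_table.get(span.start as usize).copied();
    }
    None
}

fn main() {
    let table = ["original.expr"];
    assert_eq!(undo(replacement_span(0), &table), Some("original.expr"));
    assert_eq!(undo(dummy_span(), &table), None);
    println!("ok");
}
```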
@@ -76,7 +78,7 @@ pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> Synta
             let replacement = Leaf::Ident(Ident {
                 text: "__ra_fixup".into(),
                 span: SpanData {
-                    range: TextRange::new(TextSize::new(idx), TextSize::new(!0)),
+                    range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
                     anchor: dummy_anchor,
                     ctx: span_map.span_for_range(node_range).ctx,
                 },
@@ -299,6 +301,13 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
 pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
     let Some(undo_info) = undo_info.original.as_deref() else { return };
     let undo_info = &**undo_info;
+    if never!(
+        tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+            || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+    ) {
+        tt.delimiter.close = SpanData::DUMMY;
+        tt.delimiter.open = SpanData::DUMMY;
+    }
     reverse_fixups_(tt, undo_info);
 }

@@ -310,17 +319,28 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
         .filter(|tt| match tt {
             tt::TokenTree::Leaf(leaf) => {
                 let span = leaf.span();
-                span.anchor.file_id != FileId::from_raw(!0) || span.range.end() == TextSize::new(!0)
+                let is_real_leaf = span.anchor.file_id != FIXUP_DUMMY_FILE;
+                let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END;
+                is_real_leaf || is_replaced_node
             }
             tt::TokenTree::Subtree(_) => true,
         })
         .flat_map(|tt| match tt {
             tt::TokenTree::Subtree(mut tt) => {
+                if tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+                    || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+                {
+                    // Even though fixup never creates subtrees with fixup spans, the old proc-macro server
+                    // might copy them if the proc-macro asks for it, so we need to filter those out
+                    // here as well.
+                    return SmallVec::new_const();
+                }
                 reverse_fixups_(&mut tt, undo_info);
                 SmallVec::from_const([tt.into()])
             }
             tt::TokenTree::Leaf(leaf) => {
-                if leaf.span().anchor.file_id == FileId::from_raw(!0) {
+                if leaf.span().anchor.file_id == FIXUP_DUMMY_FILE {
+                    // we have a fake node here, we need to replace it again with the original
                     let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
                     if original.delimiter.kind == tt::DelimiterKind::Invisible {
                         original.token_trees.into()
@@ -328,6 +348,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
                         SmallVec::from_const([original.into()])
                     }
                 } else {
+                    // just a normal leaf
                     SmallVec::from_const([leaf.into()])
                 }
             }
@@ -22,6 +22,7 @@ pub mod span;
 pub mod files;
 mod fixup;

+use attrs::collect_attrs;
 use triomphe::Arc;

 use std::{fmt, hash::Hash};
@@ -32,7 +33,7 @@ use base_db::{
 };
 use either::Either;
 use syntax::{
-    ast::{self, AstNode, HasDocComments},
+    ast::{self, AstNode},
     SyntaxNode, SyntaxToken, TextRange, TextSize,
 };

@@ -116,7 +117,7 @@ pub struct MacroCallLoc {
     pub krate: CrateId,
     /// Some if this is a macro call for an eager macro. Note that this is `None`
     /// for the eager input macro file.
-    eager: Option<Box<EagerCallInfo>>,
+    eager: Option<Arc<EagerCallInfo>>,
     pub kind: MacroCallKind,
     pub call_site: SyntaxContextId,
 }
@@ -438,9 +439,9 @@ impl MacroCallLoc {
             MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
                 // FIXME: handle `cfg_attr`
                 ast_id.with_value(ast_id.to_node(db)).map(|it| {
-                    it.doc_comments_and_attrs()
+                    collect_attrs(&it)
                         .nth(derive_attr_index.ast_index())
-                        .and_then(|it| match it {
+                        .and_then(|it| match it.1 {
                             Either::Left(attr) => Some(attr.syntax().clone()),
                             Either::Right(_) => None,
                         })
@@ -451,9 +452,9 @@ impl MacroCallLoc {
         if self.def.is_attribute_derive() {
             // FIXME: handle `cfg_attr`
             ast_id.with_value(ast_id.to_node(db)).map(|it| {
-                it.doc_comments_and_attrs()
+                collect_attrs(&it)
                     .nth(invoc_attr_index.ast_index())
-                    .and_then(|it| match it {
+                    .and_then(|it| match it.1 {
                         Either::Left(attr) => Some(attr.syntax().clone()),
                         Either::Right(_) => None,
                     })
@@ -549,24 +550,24 @@ impl MacroCallKind {
             MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
                 // FIXME: should be the range of the macro name, not the whole derive
                 // FIXME: handle `cfg_attr`
-                ast_id
-                    .to_node(db)
-                    .doc_comments_and_attrs()
+                collect_attrs(&ast_id.to_node(db))
                     .nth(derive_attr_index.ast_index())
                     .expect("missing derive")
+                    .1
                     .expect_left("derive is a doc comment?")
                     .syntax()
                     .text_range()
             }
             // FIXME: handle `cfg_attr`
-            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id
-                .to_node(db)
-                .doc_comments_and_attrs()
-                .nth(invoc_attr_index.ast_index())
-                .expect("missing attribute")
-                .expect_left("attribute macro is a doc comment?")
-                .syntax()
-                .text_range(),
+            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+                collect_attrs(&ast_id.to_node(db))
+                    .nth(invoc_attr_index.ast_index())
+                    .expect("missing attribute")
+                    .1
+                    .expect_left("attribute macro is a doc comment?")
+                    .syntax()
+                    .text_range()
+            }
         };

         FileRange { range, file_id }
@@ -737,11 +738,9 @@ impl ExpansionInfo {
         let attr_input_or_mac_def = def.or_else(|| match loc.kind {
             MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
                 // FIXME: handle `cfg_attr`
-                let tt = ast_id
-                    .to_node(db)
-                    .doc_comments_and_attrs()
+                let tt = collect_attrs(&ast_id.to_node(db))
                     .nth(invoc_attr_index.ast_index())
-                    .and_then(Either::left)?
+                    .and_then(|x| Either::left(x.1))?
                     .token_tree()?;
                 Some(InFile::new(ast_id.file_id, tt))
             }
@@ -75,27 +75,40 @@ pub struct RealSpanMap {
     /// Invariant: Sorted vec over TextSize
     // FIXME: SortedVec<(TextSize, ErasedFileAstId)>?
     pairs: Box<[(TextSize, ErasedFileAstId)]>,
+    end: TextSize,
 }

 impl RealSpanMap {
     /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id).
     pub fn absolute(file_id: FileId) -> Self {
-        RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]) }
+        RealSpanMap {
+            file_id,
+            pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]),
+            end: TextSize::new(!0),
+        }
     }

     pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self {
         let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)];
         let ast_id_map = db.ast_id_map(file_id.into());
-        pairs.extend(
-            db.parse(file_id)
-                .tree()
-                .items()
-                .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
-        );
-        RealSpanMap { file_id, pairs: pairs.into_boxed_slice() }
+        let tree = db.parse(file_id).tree();
+        pairs
+            .extend(tree.items().map(|item| {
+                (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())
+            }));
+        RealSpanMap {
+            file_id,
+            pairs: pairs.into_boxed_slice(),
+            end: tree.syntax().text_range().end(),
+        }
     }

     pub fn span_for_range(&self, range: TextRange) -> SpanData {
+        assert!(
+            range.end() <= self.end,
+            "range {range:?} goes beyond the end of the file {:?}",
+            self.end
+        );
         let start = range.start();
         let idx = self
             .pairs
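Note: with the new `end` field, `span_for_range` can reject ranges past the parsed file before doing its lookup. The lookup itself relies on the documented invariant that `pairs` is sorted by offset, so the anchoring item is the last pair at or before `range.start()`. A std-only sketch of that search (names assumed):

```rust
// Find the last (offset, ast_id) pair whose offset is <= start, i.e. the
// top-level item a position belongs to. `partition_point` is a binary
// search over the sorted-by-offset invariant.
fn anchor_for(pairs: &[(u32, u32)], start: u32) -> (u32, u32) {
    let idx = pairs.partition_point(|&(offset, _)| offset <= start) - 1;
    pairs[idx]
}

fn main() {
    // Offsets of three top-level items, paired with their erased ast ids.
    let pairs = [(0, 0), (120, 3), (400, 7)];
    assert_eq!(anchor_for(&pairs, 130), (120, 3));
    assert_eq!(anchor_for(&pairs, 0), (0, 0));
    println!("ok");
}
```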
@@ -113,7 +113,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
             // FIXME(const-generic-body): We should not get the return type in this way.
             ctx.return_ty = c
                 .lookup(db.upcast())
-                .thing
+                .expected_ty
                 .box_any()
                 .downcast::<InTypeConstIdMetadata>()
                 .unwrap()
@@ -262,7 +262,7 @@ impl InferenceContext<'_> {
     fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty {
         let mut expected = self.resolve_ty_shallow(expected);

-        if is_non_ref_pat(self.body, pat) {
+        if self.is_non_ref_pat(self.body, pat) {
             let mut pat_adjustments = Vec::new();
             while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
                 pat_adjustments.push(expected.clone());
@@ -496,24 +496,28 @@ impl InferenceContext<'_> {

         self.infer_expr(expr, &Expectation::has_type(expected.clone()))
     }
-}
-
-fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
-    match &body[pat] {
-        Pat::Tuple { .. }
-        | Pat::TupleStruct { .. }
-        | Pat::Record { .. }
-        | Pat::Range { .. }
-        | Pat::Slice { .. } => true,
-        Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
-        // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented.
-        Pat::Path(..) => true,
-        Pat::ConstBlock(..) => true,
-        Pat::Lit(expr) => !matches!(
-            body[*expr],
-            Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..))
-        ),
-        Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
+
+    fn is_non_ref_pat(&mut self, body: &hir_def::body::Body, pat: PatId) -> bool {
+        match &body[pat] {
+            Pat::Tuple { .. }
+            | Pat::TupleStruct { .. }
+            | Pat::Record { .. }
+            | Pat::Range { .. }
+            | Pat::Slice { .. } => true,
+            Pat::Or(pats) => pats.iter().all(|p| self.is_non_ref_pat(body, *p)),
+            Pat::Path(p) => {
+                let v = self.resolve_value_path_inner(p, pat.into());
+                v.is_some_and(|x| !matches!(x.0, hir_def::resolver::ValueNs::ConstId(_)))
+            }
+            Pat::ConstBlock(..) => false,
+            Pat::Lit(expr) => !matches!(
+                body[*expr],
+                Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..))
+            ),
+            Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => {
+                false
+            }
+        }
     }
 }
@@ -40,33 +40,7 @@ impl InferenceContext<'_> {
     }
 
     fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> {
-        let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
-            let last = path.segments().last()?;
-
-            // Don't use `self.make_ty()` here as we need `orig_ns`.
-            let ctx =
-                crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
-            let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
-            let ty = self.table.insert_type_vars(ty);
-            let ty = self.table.normalize_associated_types_in(ty);
-
-            let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
-            let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
-            let ty = self.table.insert_type_vars(ty);
-            let ty = self.table.normalize_associated_types_in(ty);
-            self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
-        } else {
-            // FIXME: report error, unresolved first path segment
-            let value_or_partial =
-                self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
-
-            match value_or_partial {
-                ResolveValueResult::ValueNs(it, _) => (it, None),
-                ResolveValueResult::Partial(def, remaining_index, _) => self
-                    .resolve_assoc_item(def, path, remaining_index, id)
-                    .map(|(it, substs)| (it, Some(substs)))?,
-            }
-        };
+        let (value, self_subst) = self.resolve_value_path_inner(path, id)?;
 
         let value_def = match value {
             ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) {
@@ -144,6 +118,41 @@ impl InferenceContext<'_> {
         Some(ValuePathResolution::GenericDef(value_def, generic_def, substs))
     }
 
+    pub(super) fn resolve_value_path_inner(
+        &mut self,
+        path: &Path,
+        id: ExprOrPatId,
+    ) -> Option<(ValueNs, Option<chalk_ir::Substitution<Interner>>)> {
+        let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
+            let last = path.segments().last()?;
+
+            // Don't use `self.make_ty()` here as we need `orig_ns`.
+            let ctx =
+                crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
+            let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
+            let ty = self.table.insert_type_vars(ty);
+            let ty = self.table.normalize_associated_types_in(ty);
+
+            let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
+            let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
+            let ty = self.table.insert_type_vars(ty);
+            let ty = self.table.normalize_associated_types_in(ty);
+            self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
+        } else {
+            // FIXME: report error, unresolved first path segment
+            let value_or_partial =
+                self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
+
+            match value_or_partial {
+                ResolveValueResult::ValueNs(it, _) => (it, None),
+                ResolveValueResult::Partial(def, remaining_index, _) => self
+                    .resolve_assoc_item(def, path, remaining_index, id)
+                    .map(|(it, substs)| (it, Some(substs)))?,
+            }
+        };
+        Some((value, self_subst))
+    }
+
     fn add_required_obligations_for_value_path(&mut self, def: GenericDefId, subst: &Substitution) {
         let predicates = self.db.generic_predicates(def);
         for predicate in predicates.iter() {
@@ -113,7 +113,9 @@ pub struct TyLoweringContext<'a> {
     pub db: &'a dyn HirDatabase,
     resolver: &'a Resolver,
     in_binders: DebruijnIndex,
-    owner: TypeOwnerId,
+    // FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases
+    // where expected
+    owner: Option<TypeOwnerId>,
     /// Note: Conceptually, it's thinkable that we could be in a location where
     /// some type params should be represented as placeholders, and others
     /// should be converted to variables. I think in practice, this isn't
@@ -127,6 +129,14 @@ pub struct TyLoweringContext<'a> {
 
 impl<'a> TyLoweringContext<'a> {
     pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver, owner: TypeOwnerId) -> Self {
+        Self::new_maybe_unowned(db, resolver, Some(owner))
+    }
+
+    pub fn new_maybe_unowned(
+        db: &'a dyn HirDatabase,
+        resolver: &'a Resolver,
+        owner: Option<TypeOwnerId>,
+    ) -> Self {
         let impl_trait_mode = ImplTraitLoweringState::Disallowed;
         let type_param_mode = ParamLoweringMode::Placeholder;
         let in_binders = DebruijnIndex::INNERMOST;
@@ -213,10 +223,11 @@ impl<'a> TyLoweringContext<'a> {
     }
 
     pub fn lower_const(&self, const_ref: &ConstRef, const_type: Ty) -> Const {
+        let Some(owner) = self.owner else { return unknown_const(const_type) };
         const_or_path_to_chalk(
             self.db,
             self.resolver,
-            self.owner,
+            owner,
             const_type,
             const_ref,
             self.type_param_mode,
@@ -1768,10 +1779,11 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
     let resolver = t.resolver(db.upcast());
     let ctx = TyLoweringContext::new(db, &resolver, t.into())
         .with_type_param_mode(ParamLoweringMode::Variable);
-    if db.type_alias_data(t).is_extern {
+    let type_alias_data = db.type_alias_data(t);
+    if type_alias_data.is_extern {
         Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
     } else {
-        let type_ref = &db.type_alias_data(t).type_ref;
+        let type_ref = &type_alias_data.type_ref;
         let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error));
         make_binders(db, &generics, inner)
     }
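The `TyLoweringContext` hunks above follow a common refactor shape: keep the strict constructor, add a `*_maybe_unowned` variant, and make owner-dependent paths degrade gracefully instead of panicking. A minimal standalone sketch of that pattern, with hypothetical `Ctx`/`Owner` types standing in for the real ones:

#[derive(Debug, Clone, Copy)]
struct Owner(u32);

struct Ctx {
    owner: Option<Owner>,
}

impl Ctx {
    fn new(owner: Owner) -> Self {
        // existing callers keep the strict signature
        Self::new_maybe_unowned(Some(owner))
    }

    fn new_maybe_unowned(owner: Option<Owner>) -> Self {
        Ctx { owner }
    }

    fn lower_const(&self) -> String {
        // owner-dependent path returns a known fallback when unowned
        let Some(owner) = self.owner else { return "unknown const".to_owned() };
        format!("const owned by {owner:?}")
    }
}

fn main() {
    assert_eq!(Ctx::new(Owner(1)).lower_const(), "const owned by Owner(1)");
    assert_eq!(Ctx::new_maybe_unowned(None).lower_const(), "unknown const");
}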
@@ -2042,7 +2054,7 @@ pub(crate) fn const_or_path_to_chalk(
             .intern_in_type_const(InTypeConstLoc {
                 id: it,
                 owner,
-                thing: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
+                expected_ty: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
             })
             .into();
         intern_const_scalar(
@@ -9,11 +9,10 @@ use super::visit_module;
 fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     let (mut db, pos) = TestDB::with_position(
         "
 //- /lib.rs
 fn foo() -> i32 {
     $01 + 1
-}
-",
+}",
     );
     {
         let events = db.log_executed(|| {
@@ -27,12 +26,11 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     }
 
     let new_text = "
 fn foo() -> i32 {
     1
     +
     1
-}
-";
+}";
 
     db.set_file_text(pos.file_id, Arc::from(new_text));
 
@@ -47,3 +45,55 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
         assert!(!format!("{events:?}").contains("infer"), "{events:#?}")
     }
 }
+
+#[test]
+fn typing_inside_a_function_should_not_invalidate_types_in_another() {
+    let (mut db, pos) = TestDB::with_position(
+        "
+//- /lib.rs
+fn foo() -> f32 {
+    1.0 + 2.0
+}
+fn bar() -> i32 {
+    $01 + 1
+}
+fn baz() -> i32 {
+    1 + 1
+}",
+    );
+    {
+        let events = db.log_executed(|| {
+            let module = db.module_for_file(pos.file_id);
+            let crate_def_map = module.def_map(&db);
+            visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+                db.infer(def);
+            });
+        });
+        assert!(format!("{events:?}").contains("infer"))
+    }
+
+    let new_text = "
+fn foo() -> f32 {
+    1.0 + 2.0
+}
+fn bar() -> i32 {
+    53
+}
+fn baz() -> i32 {
+    1 + 1
+}
+";
+
+    db.set_file_text(pos.file_id, Arc::from(new_text));
+
+    {
+        let events = db.log_executed(|| {
+            let module = db.module_for_file(pos.file_id);
+            let crate_def_map = module.def_map(&db);
+            visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+                db.infer(def);
+            });
+        });
+        assert!(format!("{events:?}").matches("infer").count() == 1, "{events:#?}")
+    }
+}
@@ -1153,3 +1153,41 @@ fn main() {
 "#,
     );
 }
+
+#[test]
+fn type_mismatch_pat_const_reference() {
+    check_no_mismatches(
+        r#"
+const TEST_STR: &'static str = "abcd";
+
+fn main() {
+    let s = "abcd";
+    match s {
+        TEST_STR => (),
+        _ => (),
+    }
+}
+
+        "#,
+    );
+    check(
+        r#"
+struct Foo<T>(T);
+
+impl<T> Foo<T> {
+    const TEST_I32_REF: &'static i32 = &3;
+    const TEST_I32: i32 = 3;
+}
+
+fn main() {
+    match &6 {
+        Foo::<i32>::TEST_I32_REF => (),
+        Foo::<i32>::TEST_I32 => (),
+      //^^^^^^^^^^^^^^^^^^^^ expected &i32, got i32
+        _ => (),
+    }
+}
+
+        "#,
+    );
+}
@@ -20,8 +20,8 @@ use hir_def::{
     AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
 };
 use hir_expand::{
-    db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, InMacroFile, MacroCallId,
-    MacroFileId, MacroFileIdExt,
+    attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
+    InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
 };
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -29,7 +29,7 @@ use smallvec::{smallvec, SmallVec};
 use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
-    ast::{self, HasAttrs as _, HasDocComments, HasGenericParams, HasLoopBody, IsString as _},
+    ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
     match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
     TextRange, TextSize,
 };
@@ -673,11 +673,22 @@ impl<'db> SemanticsImpl<'db> {
             }
             _ => 0,
         };
+        // FIXME: here, the attribute's text range is used to strip away all
+        // entries from the start of the attribute "list" up the the invoking
+        // attribute. But in
+        // ```
+        // mod foo {
+        //     #![inner]
+        // }
+        // ```
+        // we don't wanna strip away stuff in the `mod foo {` range, that is
+        // here if the id corresponds to an inner attribute we got strip all
+        // text ranges of the outer ones, and then all of the inner ones up
+        // to the invoking attribute so that the inbetween is ignored.
         let text_range = item.syntax().text_range();
-        let start = item
-            .doc_comments_and_attrs()
+        let start = collect_attrs(&item)
             .nth(attr_id)
-            .map(|attr| match attr {
+            .map(|attr| match attr.1 {
                 Either::Left(it) => it.syntax().text_range().start(),
                 Either::Right(it) => it.syntax().text_range().start(),
             })
|
||||||
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
|
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
|
||||||
let analyze = self.analyze(ty.syntax())?;
|
let analyze = self.analyze(ty.syntax())?;
|
||||||
let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
|
let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
|
||||||
let ty = hir_ty::TyLoweringContext::new(
|
let ty = hir_ty::TyLoweringContext::new_maybe_unowned(
|
||||||
self.db,
|
self.db,
|
||||||
&analyze.resolver,
|
&analyze.resolver,
|
||||||
analyze.resolver.module().into(),
|
analyze.resolver.type_owner(),
|
||||||
)
|
)
|
||||||
.lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
|
.lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
|
||||||
Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
|
Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
|
||||||
|
|
|
@@ -1040,8 +1040,9 @@ fn resolve_hir_path_(
     let types = || {
         let (ty, unresolved) = match path.type_anchor() {
             Some(type_ref) => {
-                let (_, res) = TyLoweringContext::new(db, resolver, resolver.module().into())
-                    .lower_ty_ext(type_ref);
+                let (_, res) =
+                    TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner())
+                        .lower_ty_ext(type_ref);
                 res.map(|ty_ns| (ty_ns, path.segments().first()))
             }
             None => {
@@ -23,6 +23,7 @@ pub struct FileSymbol {
     pub loc: DeclarationLocation,
     pub container_name: Option<SmolStr>,
     pub is_alias: bool,
+    pub is_assoc: bool,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||||
match module_def_id {
|
match module_def_id {
|
||||||
ModuleDefId::ModuleId(id) => self.push_module(id),
|
ModuleDefId::ModuleId(id) => self.push_module(id),
|
||||||
ModuleDefId::FunctionId(id) => {
|
ModuleDefId::FunctionId(id) => {
|
||||||
self.push_decl(id);
|
self.push_decl(id, false);
|
||||||
self.collect_from_body(id);
|
self.collect_from_body(id);
|
||||||
}
|
}
|
||||||
ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id),
|
ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id, false),
|
||||||
ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id),
|
ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, false),
|
||||||
ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id),
|
ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, false),
|
||||||
ModuleDefId::ConstId(id) => {
|
ModuleDefId::ConstId(id) => {
|
||||||
self.push_decl(id);
|
self.push_decl(id, false);
|
||||||
self.collect_from_body(id);
|
self.collect_from_body(id);
|
||||||
}
|
}
|
||||||
ModuleDefId::StaticId(id) => {
|
ModuleDefId::StaticId(id) => {
|
||||||
self.push_decl(id);
|
self.push_decl(id, false);
|
||||||
self.collect_from_body(id);
|
self.collect_from_body(id);
|
||||||
}
|
}
|
||||||
ModuleDefId::TraitId(id) => {
|
ModuleDefId::TraitId(id) => {
|
||||||
self.push_decl(id);
|
self.push_decl(id, false);
|
||||||
self.collect_from_trait(id);
|
self.collect_from_trait(id);
|
||||||
}
|
}
|
||||||
ModuleDefId::TraitAliasId(id) => {
|
ModuleDefId::TraitAliasId(id) => {
|
||||||
self.push_decl(id);
|
self.push_decl(id, false);
|
||||||
}
|
}
|
||||||
ModuleDefId::TypeAliasId(id) => {
|
ModuleDefId::TypeAliasId(id) => {
|
||||||
self.push_decl(id);
|
self.push_decl(id, false);
|
||||||
}
|
}
|
||||||
ModuleDefId::MacroId(id) => match id {
|
ModuleDefId::MacroId(id) => match id {
|
||||||
MacroId::Macro2Id(id) => self.push_decl(id),
|
MacroId::Macro2Id(id) => self.push_decl(id, false),
|
||||||
MacroId::MacroRulesId(id) => self.push_decl(id),
|
MacroId::MacroRulesId(id) => self.push_decl(id, false),
|
||||||
MacroId::ProcMacroId(id) => self.push_decl(id),
|
MacroId::ProcMacroId(id) => self.push_decl(id, false),
|
||||||
},
|
},
|
||||||
// Don't index these.
|
// Don't index these.
|
||||||
ModuleDefId::BuiltinType(_) => {}
|
ModuleDefId::BuiltinType(_) => {}
|
||||||
|
@ -190,6 +191,7 @@ impl<'a> SymbolCollector<'a> {
|
||||||
container_name: self.current_container_name.clone(),
|
container_name: self.current_container_name.clone(),
|
||||||
loc: dec_loc,
|
loc: dec_loc,
|
||||||
is_alias: false,
|
is_alias: false,
|
||||||
|
is_assoc: false,
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -202,9 +204,9 @@ impl<'a> SymbolCollector<'a> {
|
||||||
for &id in id {
|
for &id in id {
|
||||||
if id.module(self.db.upcast()) == module_id {
|
if id.module(self.db.upcast()) == module_id {
|
||||||
match id {
|
match id {
|
||||||
MacroId::Macro2Id(id) => self.push_decl(id),
|
MacroId::Macro2Id(id) => self.push_decl(id, false),
|
||||||
MacroId::MacroRulesId(id) => self.push_decl(id),
|
MacroId::MacroRulesId(id) => self.push_decl(id, false),
|
||||||
MacroId::ProcMacroId(id) => self.push_decl(id),
|
MacroId::ProcMacroId(id) => self.push_decl(id, false),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -266,13 +268,13 @@ impl<'a> SymbolCollector<'a> {
|
||||||
|
|
||||||
fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
|
fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
|
||||||
match assoc_item_id {
|
match assoc_item_id {
|
||||||
AssocItemId::FunctionId(id) => self.push_decl(id),
|
AssocItemId::FunctionId(id) => self.push_decl(id, true),
|
||||||
AssocItemId::ConstId(id) => self.push_decl(id),
|
AssocItemId::ConstId(id) => self.push_decl(id, true),
|
||||||
AssocItemId::TypeAliasId(id) => self.push_decl(id),
|
AssocItemId::TypeAliasId(id) => self.push_decl(id, true),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn push_decl<L>(&mut self, id: L)
|
fn push_decl<L>(&mut self, id: L, is_assoc: bool)
|
||||||
where
|
where
|
||||||
L: Lookup + Into<ModuleDefId>,
|
L: Lookup + Into<ModuleDefId>,
|
||||||
<L as Lookup>::Data: HasSource,
|
<L as Lookup>::Data: HasSource,
|
||||||
|
@ -296,6 +298,7 @@ impl<'a> SymbolCollector<'a> {
|
||||||
loc: dec_loc.clone(),
|
loc: dec_loc.clone(),
|
||||||
container_name: self.current_container_name.clone(),
|
container_name: self.current_container_name.clone(),
|
||||||
is_alias: true,
|
is_alias: true,
|
||||||
|
is_assoc,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -306,6 +309,7 @@ impl<'a> SymbolCollector<'a> {
|
||||||
container_name: self.current_container_name.clone(),
|
container_name: self.current_container_name.clone(),
|
||||||
loc: dec_loc,
|
loc: dec_loc,
|
||||||
is_alias: false,
|
is_alias: false,
|
||||||
|
is_assoc,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -331,6 +335,7 @@ impl<'a> SymbolCollector<'a> {
|
||||||
loc: dec_loc.clone(),
|
loc: dec_loc.clone(),
|
||||||
container_name: self.current_container_name.clone(),
|
container_name: self.current_container_name.clone(),
|
||||||
is_alias: true,
|
is_alias: true,
|
||||||
|
is_assoc: false,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -341,6 +346,7 @@ impl<'a> SymbolCollector<'a> {
|
||||||
container_name: self.current_container_name.clone(),
|
container_name: self.current_container_name.clone(),
|
||||||
loc: dec_loc,
|
loc: dec_loc,
|
||||||
is_alias: false,
|
is_alias: false,
|
||||||
|
is_assoc: false,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -216,7 +216,7 @@ fn edit_field_references(
         edit.edit_file(file_id);
         for r in refs {
             if let Some(name_ref) = r.name.as_name_ref() {
-                edit.replace(name_ref.syntax().text_range(), name.text());
+                edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text());
             }
         }
     }
@@ -458,13 +458,11 @@ impl Builder {
         }
         if let [import_edit] = &*self.imports_to_add {
             // snippets can have multiple imports, but normal completions only have up to one
-            if let Some(original_path) = import_edit.original_path.as_ref() {
-                label_detail.replace(SmolStr::from(format!(
-                    "{} (use {})",
-                    label_detail.as_deref().unwrap_or_default(),
-                    original_path.display(db)
-                )));
-            }
+            label_detail.replace(SmolStr::from(format!(
+                "{} (use {})",
+                label_detail.as_deref().unwrap_or_default(),
+                import_edit.import_path.display(db)
+            )));
         } else if let Some(trait_name) = self.trait_name {
             label_detail.replace(SmolStr::from(format!(
                 "{} (as {trait_name})",
@@ -181,7 +181,7 @@ fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<V
             ctx.config.prefer_no_std,
             ctx.config.prefer_prelude,
         )?;
-        Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item, None)))
+        Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item)))
     };
     let mut res = Vec::with_capacity(requires.len());
     for import in requires {
@@ -597,8 +597,8 @@ fn main() {
 }
 "#,
         expect![[r#"
-            ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
             fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
+            ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
         "#]],
     );
 }
@@ -717,7 +717,7 @@ fn main() {
     check(
         fixture,
         expect![[r#"
-            st Item (use foo::bar::baz::Item) Item
+            st Item (use foo::bar) Item
         "#]],
     );
 
@@ -725,19 +725,19 @@ fn main() {
         "Item",
         fixture,
         r#"
 use foo::bar;
 
 mod foo {
     pub mod bar {
         pub mod baz {
             pub struct Item;
-        }
-    }
 }
+}
+}
 
 fn main() {
     bar::baz::Item
 }"#,
     );
 }
@@ -803,7 +803,7 @@ fn main() {
     check(
         fixture,
         expect![[r#"
-            ct TEST_ASSOC (use foo::bar::Item) usize
+            ct TEST_ASSOC (use foo::bar) usize
         "#]],
     );
 
@@ -1,14 +1,14 @@
 //! Look up accessible paths for items.
 
 use hir::{
-    AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef,
+    AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef, Name,
     PathResolution, PrefixKind, ScopeDef, Semantics, SemanticsScope, Type,
 };
-use itertools::Itertools;
-use rustc_hash::FxHashSet;
+use itertools::{EitherOrBoth, Itertools};
+use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::{
     ast::{self, make, HasName},
-    utils::path_to_string_stripping_turbo_fish,
-    AstNode, SyntaxNode,
+    AstNode, SmolStr, SyntaxNode,
 };
 
 use crate::{
@@ -51,18 +51,11 @@ pub struct TraitImportCandidate {
 #[derive(Debug)]
 pub struct PathImportCandidate {
     /// Optional qualifier before name.
-    pub qualifier: Option<FirstSegmentUnresolved>,
+    pub qualifier: Option<Vec<SmolStr>>,
     /// The name the item (struct, trait, enum, etc.) should have.
     pub name: NameToImport,
 }
 
-/// A qualifier that has a first segment and it's unresolved.
-#[derive(Debug)]
-pub struct FirstSegmentUnresolved {
-    fist_segment: ast::NameRef,
-    full_qualifier: ast::Path,
-}
-
 /// A name that will be used during item lookups.
 #[derive(Debug, Clone)]
 pub enum NameToImport {
@@ -195,18 +188,11 @@ pub struct LocatedImport {
     /// the original item is the associated constant, but the import has to be a trait that
     /// defines this constant.
     pub original_item: ItemInNs,
-    /// A path of the original item.
-    pub original_path: Option<ModPath>,
 }
 
 impl LocatedImport {
-    pub fn new(
-        import_path: ModPath,
-        item_to_import: ItemInNs,
-        original_item: ItemInNs,
-        original_path: Option<ModPath>,
-    ) -> Self {
-        Self { import_path, item_to_import, original_item, original_path }
+    pub fn new(import_path: ModPath, item_to_import: ItemInNs, original_item: ItemInNs) -> Self {
+        Self { import_path, item_to_import, original_item }
     }
 }
 
@@ -351,64 +337,75 @@ fn path_applicable_imports(
         )
         .filter_map(|item| {
             let mod_path = mod_path(item)?;
-            Some(LocatedImport::new(mod_path.clone(), item, item, Some(mod_path)))
-        })
-        .collect()
-        }
-        Some(first_segment_unresolved) => {
-            let unresolved_qualifier =
-                path_to_string_stripping_turbo_fish(&first_segment_unresolved.full_qualifier);
-            let unresolved_first_segment = first_segment_unresolved.fist_segment.text();
-            items_locator::items_with_name(
-                sema,
-                current_crate,
-                path_candidate.name.clone(),
-                AssocSearchMode::Include,
-                Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
-            )
-            .filter_map(|item| {
-                import_for_item(
-                    sema.db,
-                    mod_path,
-                    &unresolved_first_segment,
-                    &unresolved_qualifier,
-                    item,
-                )
+            Some(LocatedImport::new(mod_path, item, item))
             })
             .collect()
         }
+        Some(qualifier) => items_locator::items_with_name(
+            sema,
+            current_crate,
+            path_candidate.name.clone(),
+            AssocSearchMode::Include,
+            Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+        )
+        .filter_map(|item| import_for_item(sema.db, mod_path, &qualifier, item))
+        .collect(),
     }
 }
 
 fn import_for_item(
     db: &RootDatabase,
     mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
-    unresolved_first_segment: &str,
-    unresolved_qualifier: &str,
+    unresolved_qualifier: &[SmolStr],
     original_item: ItemInNs,
 ) -> Option<LocatedImport> {
     let _p = profile::span("import_assets::import_for_item");
+    let [first_segment, ..] = unresolved_qualifier else { return None };
 
-    let original_item_candidate = item_for_path_search(db, original_item)?;
-    let import_path_candidate = mod_path(original_item_candidate)?;
-    let import_path_string = import_path_candidate.display(db).to_string();
+    let item_as_assoc = item_as_assoc(db, original_item);
 
-    let expected_import_end = if item_as_assoc(db, original_item).is_some() {
-        unresolved_qualifier.to_string()
-    } else {
-        format!("{unresolved_qualifier}::{}", item_name(db, original_item)?.display(db))
+    let (original_item_candidate, trait_item_to_import) = match item_as_assoc {
+        Some(assoc_item) => match assoc_item.container(db) {
+            AssocItemContainer::Trait(trait_) => {
+                let trait_ = ItemInNs::from(ModuleDef::from(trait_));
+                (trait_, Some(trait_))
+            }
+            AssocItemContainer::Impl(impl_) => {
+                (ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)), None)
+            }
+        },
+        None => (original_item, None),
     };
-    if !import_path_string.contains(unresolved_first_segment)
-        || !import_path_string.ends_with(&expected_import_end)
-    {
+    let import_path_candidate = mod_path(original_item_candidate)?;
+
+    let mut import_path_candidate_segments = import_path_candidate.segments().iter().rev();
+    let predicate = |it: EitherOrBoth<&SmolStr, &Name>| match it {
+        // segments match, check next one
+        EitherOrBoth::Both(a, b) if b.as_str() == Some(&**a) => None,
+        // segments mismatch / qualifier is longer than the path, bail out
+        EitherOrBoth::Both(..) | EitherOrBoth::Left(_) => Some(false),
+        // all segments match and we have exhausted the qualifier, proceed
+        EitherOrBoth::Right(_) => Some(true),
+    };
+    if item_as_assoc.is_none() {
+        let item_name = item_name(db, original_item)?.as_text()?;
+        let last_segment = import_path_candidate_segments.next()?;
+        if last_segment.as_str() != Some(&*item_name) {
+            return None;
+        }
+    }
+    let ends_with = unresolved_qualifier
+        .iter()
+        .rev()
+        .zip_longest(import_path_candidate_segments)
+        .find_map(predicate)
+        .unwrap_or(true);
+    if !ends_with {
         return None;
     }
 
-    let segment_import =
-        find_import_for_segment(db, original_item_candidate, unresolved_first_segment)?;
-    let trait_item_to_import = item_as_assoc(db, original_item)
-        .and_then(|assoc| assoc.containing_trait(db))
-        .map(|trait_| ItemInNs::from(ModuleDef::from(trait_)));
+    let segment_import = find_import_for_segment(db, original_item_candidate, first_segment)?;
+
     Some(match (segment_import == original_item_candidate, trait_item_to_import) {
         (true, Some(_)) => {
             // FIXME we should be able to import both the trait and the segment,
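The new qualifier check above compares the unresolved qualifier against the tail of each candidate import path, walking both from the back with `itertools::zip_longest`. A self-contained sketch of the same matching logic, using `&str` slices where the real code uses `SmolStr` and `Name` (requires the `itertools` crate):

use itertools::{EitherOrBoth, Itertools};

fn qualifier_matches(qualifier: &[&str], import_path: &[&str]) -> bool {
    qualifier
        .iter()
        .rev()
        .zip_longest(import_path.iter().rev())
        .find_map(|it| match it {
            // segments match, keep scanning towards the front
            EitherOrBoth::Both(a, b) if a == b => None,
            // mismatch, or qualifier longer than the path: reject
            EitherOrBoth::Both(..) | EitherOrBoth::Left(_) => Some(false),
            // qualifier exhausted with every segment matching: accept
            EitherOrBoth::Right(_) => Some(true),
        })
        .unwrap_or(true) // equal lengths and everything matched
}

fn main() {
    assert!(qualifier_matches(&["bar", "baz"], &["foo", "bar", "baz"]));
    assert!(!qualifier_matches(&["qux", "baz"], &["foo", "bar", "baz"]));
    assert!(!qualifier_matches(&["a", "foo", "bar"], &["foo", "bar"]));
}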
@@ -416,42 +413,37 @@ fn import_for_item(
             // especially in case of lazy completion edit resolutions.
             return None;
         }
-        (false, Some(trait_to_import)) => LocatedImport::new(
-            mod_path(trait_to_import)?,
-            trait_to_import,
-            original_item,
-            mod_path(original_item),
-        ),
-        (true, None) => LocatedImport::new(
-            import_path_candidate,
-            original_item_candidate,
-            original_item,
-            mod_path(original_item),
-        ),
-        (false, None) => LocatedImport::new(
-            mod_path(segment_import)?,
-            segment_import,
-            original_item,
-            mod_path(original_item),
-        ),
+        (false, Some(trait_to_import)) => {
+            LocatedImport::new(mod_path(trait_to_import)?, trait_to_import, original_item)
+        }
+        (true, None) => {
+            LocatedImport::new(import_path_candidate, original_item_candidate, original_item)
+        }
+        (false, None) => {
+            LocatedImport::new(mod_path(segment_import)?, segment_import, original_item)
+        }
     })
 }
 
 pub fn item_for_path_search(db: &RootDatabase, item: ItemInNs) -> Option<ItemInNs> {
     Some(match item {
         ItemInNs::Types(_) | ItemInNs::Values(_) => match item_as_assoc(db, item) {
-            Some(assoc_item) => match assoc_item.container(db) {
-                AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
-                AssocItemContainer::Impl(impl_) => {
-                    ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?))
-                }
-            },
+            Some(assoc_item) => item_for_path_search_assoc(db, assoc_item)?,
             None => item,
         },
         ItemInNs::Macros(_) => item,
     })
 }
 
+fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Option<ItemInNs> {
+    Some(match assoc_item.container(db) {
+        AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
+        AssocItemContainer::Impl(impl_) => {
+            ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?))
+        }
+    })
+}
+
 fn find_import_for_segment(
     db: &RootDatabase,
     original_item: ItemInNs,
@@ -528,6 +520,7 @@ fn trait_applicable_items(
         .collect();
 
     let mut located_imports = FxHashSet::default();
+    let mut trait_import_paths = FxHashMap::default();
 
     if trait_assoc_item {
         trait_candidate.receiver_ty.iterate_path_candidates(
@@ -545,12 +538,14 @@ fn trait_applicable_items(
                     }
                     let located_trait = assoc.containing_trait(db)?;
                     let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
-                    let original_item = assoc_to_item(assoc);
+                    let import_path = trait_import_paths
+                        .entry(trait_item)
+                        .or_insert_with(|| mod_path(trait_item))
+                        .clone()?;
                     located_imports.insert(LocatedImport::new(
-                        mod_path(trait_item)?,
+                        import_path,
                         trait_item,
-                        original_item,
-                        mod_path(original_item),
+                        assoc_to_item(assoc),
                     ));
                 }
                 None::<()>
@@ -568,12 +563,14 @@ fn trait_applicable_items(
                 if required_assoc_items.contains(&assoc) {
                     let located_trait = assoc.containing_trait(db)?;
                     let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
-                    let original_item = assoc_to_item(assoc);
+                    let import_path = trait_import_paths
+                        .entry(trait_item)
+                        .or_insert_with(|| mod_path(trait_item))
+                        .clone()?;
                     located_imports.insert(LocatedImport::new(
-                        mod_path(trait_item)?,
+                        import_path,
                         trait_item,
-                        original_item,
-                        mod_path(original_item),
+                        assoc_to_item(assoc),
                     ));
                 }
                 None::<()>
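Both `trait_applicable_items` hunks above memoize `mod_path` per trait, since many associated items resolve to the same containing trait. A standalone sketch of that `entry`/`or_insert_with` caching shape, with `u32` ids and `String` paths standing in for the real `ItemInNs`/`ModPath` types:

use std::collections::HashMap;

// stand-in for the expensive path computation done once per trait
fn expensive_mod_path(trait_id: u32) -> Option<String> {
    println!("computing path for trait {trait_id}");
    Some(format!("krate::traits::Trait{trait_id}"))
}

fn main() {
    let mut trait_import_paths: HashMap<u32, Option<String>> = HashMap::new();
    let assoc_items = [(0_u32, "CONST"), (0, "method"), (1, "other")];

    for (trait_id, item) in assoc_items {
        // compute the trait's import path at most once, then reuse it
        let path = trait_import_paths
            .entry(trait_id)
            .or_insert_with(|| expensive_mod_path(trait_id))
            .clone();
        if let Some(path) = path {
            println!("import {path} for {item}");
        }
    }
}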
@@ -671,18 +668,13 @@ fn path_import_candidate(
     Some(match qualifier {
         Some(qualifier) => match sema.resolve_path(&qualifier) {
             None => {
-                let qualifier_start =
-                    qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
-                let qualifier_start_path =
-                    qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
-                if sema.resolve_path(&qualifier_start_path).is_none() {
-                    ImportCandidate::Path(PathImportCandidate {
-                        qualifier: Some(FirstSegmentUnresolved {
-                            fist_segment: qualifier_start,
-                            full_qualifier: qualifier,
-                        }),
-                        name,
-                    })
+                if qualifier.first_qualifier().map_or(true, |it| sema.resolve_path(&it).is_none()) {
+                    let mut qualifier = qualifier
+                        .segments_of_this_path_only_rev()
+                        .map(|seg| seg.name_ref().map(|name| SmolStr::new(name.text())))
+                        .collect::<Option<Vec<_>>>()?;
+                    qualifier.reverse();
+                    ImportCandidate::Path(PathImportCandidate { qualifier: Some(qualifier), name })
                 } else {
                     return None;
                 }
@@ -3,13 +3,13 @@
 //! The main reason for this module to exist is the fact that project's items and dependencies' items
 //! are located in different caches, with different APIs.
 use either::Either;
-use hir::{import_map, AsAssocItem, Crate, ItemInNs, Semantics};
+use hir::{import_map, Crate, ItemInNs, Semantics};
 use limit::Limit;
 
 use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase};
 
 /// A value to use, when uncertain which limit to pick.
-pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(40);
+pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
 
 pub use import_map::AssocSearchMode;
 
@@ -36,7 +36,9 @@ pub fn items_with_name<'a>(
         NameToImport::Prefix(exact_name, case_sensitive)
         | NameToImport::Exact(exact_name, case_sensitive) => {
             let mut local_query = symbol_index::Query::new(exact_name.clone());
-            let mut external_query = import_map::Query::new(exact_name);
+            let mut external_query =
+                // import_map::Query::new(exact_name).assoc_search_mode(assoc_item_search);
+                import_map::Query::new(exact_name);
             if prefix {
                 local_query.prefix();
                 external_query = external_query.prefix();
@@ -101,8 +103,8 @@ fn find_items<'a>(
         .into_iter()
         .filter(move |candidate| match assoc_item_search {
             AssocSearchMode::Include => true,
-            AssocSearchMode::Exclude => candidate.def.as_assoc_item(db).is_none(),
-            AssocSearchMode::AssocItemsOnly => candidate.def.as_assoc_item(db).is_some(),
+            AssocSearchMode::Exclude => !candidate.is_assoc,
+            AssocSearchMode::AssocItemsOnly => candidate.is_assoc,
         })
         .map(|local_candidate| match local_candidate.def {
             hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
@@ -50,7 +50,7 @@ enum SearchMode {
     Prefix,
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub struct Query {
     query: String,
     lowercased: String,
@@ -36,6 +36,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
            },
             FileSymbol {
                 name: "Struct",
@@ -65,6 +66,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "mul1",
@@ -94,6 +96,7 @@
                 },
                 container_name: None,
                 is_alias: true,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "mul2",
@@ -123,6 +126,7 @@
                 },
                 container_name: None,
                 is_alias: true,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "s1",
@@ -152,6 +156,7 @@
                 },
                 container_name: None,
                 is_alias: true,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "s1",
@@ -181,6 +186,7 @@
                 },
                 container_name: None,
                 is_alias: true,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "s2",
@@ -210,6 +216,7 @@
                 },
                 container_name: None,
                 is_alias: true,
+                is_assoc: false,
             },
         ],
     ),
@@ -34,6 +34,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "CONST",
@@ -61,6 +62,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "CONST_WITH_INNER",
@@ -88,6 +90,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "Enum",
@@ -117,6 +120,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "ItemLikeMacro",
@@ -146,6 +150,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "Macro",
@@ -175,6 +180,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "STATIC",
@@ -202,6 +208,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "Struct",
@@ -231,6 +238,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "StructFromMacro",
@@ -260,6 +268,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "StructInFn",
@@ -291,6 +300,7 @@
                     "main",
                 ),
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "StructInNamedConst",
@@ -322,6 +332,7 @@
                     "CONST_WITH_INNER",
                 ),
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "StructInUnnamedConst",
@@ -351,6 +362,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "Trait",
@@ -378,6 +390,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "Trait",
@@ -407,6 +420,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "Union",
@@ -436,6 +450,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "a_mod",
@@ -465,6 +480,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "b_mod",
@@ -494,6 +510,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "define_struct",
@@ -523,6 +540,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "impl_fn",
@@ -550,6 +568,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: true,
             },
             FileSymbol {
                 name: "macro_rules_macro",
@@ -579,6 +598,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "main",
@@ -606,6 +626,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "really_define_struct",
@@ -635,6 +656,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "trait_fn",
@@ -664,6 +686,7 @@
                     "Trait",
                 ),
                 is_alias: false,
+                is_assoc: true,
             },
         ],
     ),
@@ -704,6 +727,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
         ],
     ),
@@ -744,6 +768,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "StructInModB",
@@ -773,6 +798,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "SuperItemLikeMacro",
@@ -802,6 +828,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "ThisStruct",
@@ -831,6 +858,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
             FileSymbol {
                 name: "ThisStruct",
@@ -860,6 +888,7 @@
                 },
                 container_name: None,
                 is_alias: false,
+                is_assoc: false,
             },
         ],
     ),
@@ -44,21 +44,6 @@ extern crate core;
 extern crate self as foo;
 struct Foo;
 use foo::Foo as Bar;
-"#,
-    );
-}
-
-#[test]
-fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() {
-    check_diagnostics(
-        r#"
-//- /lib.rs
-#[macro_use] extern crate doesnotexist;
-//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate
-mod _test_inner {
-    #![empty_attr]
-  //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
-}
 "#,
     );
 }
@@ -67,6 +67,18 @@ macro_rules! m { () => {} } }
 
 self::m!(); self::m2!();
           //^^ error: unresolved macro `self::m2!`
+"#,
+    );
+}
+
+#[test]
+fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() {
+    check_diagnostics(
+        r#"
+mod _test_inner {
+    #![empty_attr]
+  //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
+}
 "#,
     );
 }
@@ -94,7 +94,7 @@ use syntax::{
 };

 // FIXME: Make this an enum
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
 pub enum DiagnosticCode {
     RustcHardError(&'static str),
     RustcLint(&'static str),
@@ -198,7 +198,7 @@ impl Diagnostic {
     }
 }

-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 pub enum Severity {
     Error,
     Warning,
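Adding `Hash` to these derives lets `DiagnosticCode` and `Severity` key hash maps, which the new rustc-tests runner later in this commit relies on when tallying expected versus actual errors. A small sketch with a simplified stand-in enum:

use std::collections::HashMap;

// Simplified model of the enum; the real one lives in ide-diagnostics.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum DiagnosticCode {
    RustcHardError(&'static str),
}

fn main() {
    // With Hash derived, codes can key a map, as the rustc-tests runner
    // does when counting diagnostics per error code.
    let mut counts: HashMap<DiagnosticCode, usize> = HashMap::new();
    for code in [
        DiagnosticCode::RustcHardError("E0308"),
        DiagnosticCode::RustcHardError("E0308"),
        DiagnosticCode::RustcHardError("E0432"),
    ] {
        *counts.entry(code).or_insert(0) += 1;
    }
    assert_eq!(counts[&DiagnosticCode::RustcHardError("E0308")], 2);
}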
@@ -422,6 +422,11 @@ fn ty_to_text_edit(
     Some(builder.finish())
 }

+pub enum RangeLimit {
+    Fixed(TextRange),
+    NearestParent(TextSize),
+}
+
 // Feature: Inlay Hints
 //
 // rust-analyzer shows additional information inline with the source code.
@@ -443,7 +448,7 @@ fn ty_to_text_edit(
 pub(crate) fn inlay_hints(
     db: &RootDatabase,
     file_id: FileId,
-    range_limit: Option<TextRange>,
+    range_limit: Option<RangeLimit>,
     config: &InlayHintsConfig,
 ) -> Vec<InlayHint> {
     let _p = profile::span("inlay_hints");
@@ -458,13 +463,31 @@ pub(crate) fn inlay_hints(

     let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);
     match range_limit {
-        Some(range) => match file.covering_element(range) {
+        Some(RangeLimit::Fixed(range)) => match file.covering_element(range) {
             NodeOrToken::Token(_) => return acc,
             NodeOrToken::Node(n) => n
                 .descendants()
                 .filter(|descendant| range.intersect(descendant.text_range()).is_some())
                 .for_each(hints),
         },
+        Some(RangeLimit::NearestParent(position)) => {
+            match file.token_at_offset(position).left_biased() {
+                Some(token) => {
+                    if let Some(parent_block) =
+                        token.parent_ancestors().find_map(ast::BlockExpr::cast)
+                    {
+                        parent_block.syntax().descendants().for_each(hints)
+                    } else if let Some(parent_item) =
+                        token.parent_ancestors().find_map(ast::Item::cast)
+                    {
+                        parent_item.syntax().descendants().for_each(hints)
+                    } else {
+                        return acc;
+                    }
+                }
+                None => return acc,
+            }
+        }
         None => file.descendants().for_each(hints),
     };
 }
@@ -177,7 +177,11 @@ mod tests {
     use syntax::{TextRange, TextSize};
     use test_utils::extract_annotations;

-    use crate::{fixture, inlay_hints::InlayHintsConfig, ClosureReturnTypeHints};
+    use crate::{
+        fixture,
+        inlay_hints::{InlayHintsConfig, RangeLimit},
+        ClosureReturnTypeHints,
+    };

     use crate::inlay_hints::tests::{
         check, check_edit, check_no_edit, check_with_config, DISABLED_CONFIG, TEST_CONFIG,
@@ -400,7 +404,7 @@ fn main() {
         .inlay_hints(
             &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
             file_id,
-            Some(TextRange::new(TextSize::from(500), TextSize::from(600))),
+            Some(RangeLimit::Fixed(TextRange::new(TextSize::from(500), TextSize::from(600)))),
         )
         .unwrap();
     let actual =
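The new `RangeLimit` parameter replaces the plain `TextRange` that callers passed before. A minimal sketch of how a caller chooses between the two variants, assuming the `ide` crate API and re-exports exactly as shown in this diff (the helper functions and their names are illustrative only):

// Illustrative helpers (not from the commit) showing the two RangeLimit modes.
use ide::{Analysis, Cancellable, FileId, InlayHint, InlayHintsConfig, RangeLimit, TextRange, TextSize};

// Viewport-style request: hints for an explicit text range only.
fn hints_in_viewport(
    analysis: &Analysis,
    config: &InlayHintsConfig,
    file_id: FileId,
    range: TextRange,
) -> Cancellable<Vec<InlayHint>> {
    analysis.inlay_hints(config, file_id, Some(RangeLimit::Fixed(range)))
}

// Resolve-style request: recompute hints in the nearest enclosing block or
// item around a position, so a previously sent hint can be located again
// without guessing a byte range around it.
fn hints_near(
    analysis: &Analysis,
    config: &InlayHintsConfig,
    file_id: FileId,
    position: TextSize,
) -> Cancellable<Vec<InlayHint>> {
    analysis.inlay_hints(config, file_id, Some(RangeLimit::NearestParent(position)))
}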
@@ -94,7 +94,7 @@ pub use crate::{
     inlay_hints::{
         AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints,
         InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintPosition,
-        InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints,
+        InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, RangeLimit,
     },
     join_lines::JoinLinesConfig,
     markup::Markup,
@@ -133,7 +133,9 @@ pub use ide_db::{
     symbol_index::Query,
     RootDatabase, SymbolKind,
 };
-pub use ide_diagnostics::{Diagnostic, DiagnosticsConfig, ExprFillDefaultMode, Severity};
+pub use ide_diagnostics::{
+    Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode, Severity,
+};
 pub use ide_ssr::SsrError;
 pub use syntax::{TextRange, TextSize};
 pub use text_edit::{Indel, TextEdit};
@@ -397,7 +399,7 @@ impl Analysis {
         &self,
         config: &InlayHintsConfig,
         file_id: FileId,
-        range: Option<TextRange>,
+        range: Option<RangeLimit>,
     ) -> Cancellable<Vec<InlayHint>> {
         self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config))
     }
@@ -311,7 +311,7 @@ where

         let ident = tt::Leaf::from(tt::Ident {
             text: SmolStr::new(&token.to_text(conv)[1..]),
-            span: conv.span_for(TextRange::at(
+            span: conv.span_for(TextRange::new(
                 abs_range.start() + TextSize::of('\''),
                 abs_range.end(),
             )),
@@ -625,25 +625,6 @@ impl<SpanMap, S> Converter<SpanMap, S> {
     }

     fn next_token(&mut self) -> Option<SyntaxToken> {
-        // while let Some(ev) = self.preorder.next() {
-        //     match ev {
-        //         WalkEvent::Enter(SyntaxElement::Token(t)) => {
-        //             if let Some(leafs) = self.append.remove(&t.clone().into()) {
-        //                 self.current_leafs.extend(leafs);
-        //             }
-        //             return Some(t);
-        //         }
-        //         WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => {
-        //             self.preorder.skip_subtree();
-        //             if let Some(leafs) = self.append.remove(&n.into()) {
-        //                 self.current_leafs.extend(leafs);
-        //             }
-        //         }
-        //         _ => (),
-        //     }
-        // }
-        // None;
-
         while let Some(ev) = self.preorder.next() {
             match ev {
                 WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t),
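The one-line fix above matters because `TextRange::at` takes an (offset, length) pair while `TextRange::new` takes (start, end); passing `abs_range.end()` where a length was expected produced an oversized range. A small demonstration against the `text-size` crate:

use text_size::{TextRange, TextSize};

fn main() {
    let start = TextSize::from(10);
    let end = TextSize::from(14);
    // Correct: a length of `end - start` reproduces [start, end).
    assert_eq!(TextRange::at(start, end - start), TextRange::new(start, end));
    // The old call passed `end` where a length was expected, yielding
    // [10, 24) instead of [10, 14).
    assert_ne!(TextRange::at(start, end), TextRange::new(start, end));
}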
@@ -131,7 +131,6 @@ impl<'a, S: Span> TtIter<'a, S> {
         let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
         let parser_input = to_parser_input(&buffer);
         let tree_traversal = entry_point.parse(&parser_input);
-
         let mut cursor = buffer.begin();
         let mut error = false;
         for step in tree_traversal.iter() {
@@ -163,12 +162,10 @@ impl<'a, S: Span> TtIter<'a, S> {
         let mut curr = buffer.begin();
         let mut res = vec![];

-        if cursor.is_root() {
-            while curr != cursor {
-                let Some(token) = curr.token_tree() else { break };
-                res.push(token.cloned());
-                curr = curr.bump();
-            }
+        while curr != cursor {
+            let Some(token) = curr.token_tree() else { break };
+            res.push(token.cloned());
+            curr = curr.bump();
         }

         self.inner = self.inner.as_slice()[res.len()..].iter();
@@ -131,7 +131,7 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
             let len_bytes = &dot_rustc[8..16];
             let data_len = u64::from_le_bytes(len_bytes.try_into().unwrap()) as usize;
             (&dot_rustc[16..data_len + 12], 17)
         }
         _ => {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidData,
@@ -42,6 +42,7 @@ tracing-tree.workspace = true
 triomphe.workspace = true
 nohash-hasher.workspace = true
 always-assert = "0.1.2"
+walkdir = "2.3.2"

 cfg.workspace = true
 flycheck.workspace = true
@@ -87,6 +87,7 @@ fn main() -> anyhow::Result<()> {
         flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
         flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?,
         flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?,
+        flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?,
     }
     Ok(())
 }
@@ -10,6 +10,7 @@ mod ssr;
 mod lsif;
 mod scip;
 mod run_tests;
+mod rustc_tests;

 mod progress_report;

@@ -98,6 +98,15 @@ xflags::xflags! {
             required path: PathBuf
         }

+        /// Run unit tests of the project using mir interpreter
+        cmd rustc-tests {
+            /// Directory with Cargo.toml.
+            required rustc_repo: PathBuf
+
+            /// Only run tests with filter as substring
+            optional --filter path: String
+        }
+
         cmd diagnostics {
             /// Directory with Cargo.toml.
             required path: PathBuf
@@ -159,6 +168,7 @@ pub enum RustAnalyzerCmd {
     Highlight(Highlight),
     AnalysisStats(AnalysisStats),
     RunTests(RunTests),
+    RustcTests(RustcTests),
     Diagnostics(Diagnostics),
     Ssr(Ssr),
     Search(Search),
@@ -211,6 +221,12 @@ pub struct RunTests {
     pub path: PathBuf,
 }

+#[derive(Debug)]
+pub struct RustcTests {
+    pub rustc_repo: PathBuf,
+    pub filter: Option<String>,
+}
+
 #[derive(Debug)]
 pub struct Diagnostics {
     pub path: PathBuf,
236 crates/rust-analyzer/src/cli/rustc_tests.rs Normal file
@@ -0,0 +1,236 @@
+//! Run all tests in a project, similar to `cargo test`, but using the mir interpreter.
+
+use std::{
+    cell::RefCell, collections::HashMap, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf,
+};
+
+use hir::Crate;
+use ide::{AnalysisHost, Change, DiagnosticCode, DiagnosticsConfig};
+use profile::StopWatch;
+use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot};
+
+use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
+use triomphe::Arc;
+use vfs::{AbsPathBuf, FileId};
+use walkdir::WalkDir;
+
+use crate::cli::{flags, report_metric, Result};
+
+struct Tester {
+    host: AnalysisHost,
+    root_file: FileId,
+    pass_count: u64,
+    ignore_count: u64,
+    fail_count: u64,
+    stopwatch: StopWatch,
+}
+
+fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode {
+    thread_local! {
+        static LEAK_STORE: RefCell<HashMap<String, DiagnosticCode>> = RefCell::new(HashMap::new());
+    }
+    LEAK_STORE.with_borrow_mut(|s| match s.get(code) {
+        Some(c) => *c,
+        None => {
+            let v = DiagnosticCode::RustcHardError(format!("E{code}").leak());
+            s.insert(code.to_owned(), v);
+            v
+        }
+    })
+}
+
+fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> HashMap<DiagnosticCode, usize> {
+    let text = read_to_string(p).unwrap();
+    let mut result = HashMap::new();
+    {
+        let mut text = &*text;
+        while let Some(p) = text.find("error[E") {
+            text = &text[p + 7..];
+            let code = string_to_diagnostic_code_leaky(&text[..4]);
+            *result.entry(code).or_insert(0) += 1;
+        }
+    }
+    result
+}
+
+impl Tester {
+    fn new() -> Result<Self> {
+        let tmp_file = AbsPathBuf::assert("/tmp/ra-rustc-test.rs".into());
+        std::fs::write(&tmp_file, "")?;
+        let mut cargo_config = CargoConfig::default();
+        cargo_config.sysroot = Some(RustLibSource::Discover);
+        let workspace = ProjectWorkspace::DetachedFiles {
+            files: vec![tmp_file.clone()],
+            sysroot: Ok(
+                Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env).unwrap()
+            ),
+            rustc_cfg: vec![],
+        };
+        let load_cargo_config = LoadCargoConfig {
+            load_out_dirs_from_check: false,
+            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
+            prefill_caches: false,
+        };
+        let (host, _vfs, _proc_macro) =
+            load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+        let db = host.raw_database();
+        let krates = Crate::all(db);
+        let root_crate = krates.iter().cloned().find(|krate| krate.origin(db).is_local()).unwrap();
+        let root_file = root_crate.root_file(db);
+        Ok(Self {
+            host,
+            root_file,
+            pass_count: 0,
+            ignore_count: 0,
+            fail_count: 0,
+            stopwatch: StopWatch::start(),
+        })
+    }
+
+    fn test(&mut self, p: PathBuf) {
+        if p.parent().unwrap().file_name().unwrap() == "auxiliary" {
+            // These are not tests
+            return;
+        }
+        if IGNORED_TESTS.iter().any(|ig| p.file_name().is_some_and(|x| x == *ig)) {
+            println!("{p:?} IGNORE");
+            self.ignore_count += 1;
+            return;
+        }
+        let stderr_path = p.with_extension("stderr");
+        let expected = if stderr_path.exists() {
+            detect_errors_from_rustc_stderr_file(stderr_path)
+        } else {
+            HashMap::new()
+        };
+        let text = read_to_string(&p).unwrap();
+        let mut change = Change::new();
+        // Ignore unstable tests, since they move too fast and we do not intend to support all of them.
+        let mut ignore_test = text.contains("#![feature");
+        // Ignore test with extern crates, as this infra don't support them yet.
+        ignore_test |= text.contains("// aux-build:") || text.contains("// aux-crate:");
+        // Ignore test with extern modules similarly.
+        ignore_test |= text.contains("mod ");
+        // These should work, but they don't, and I don't know why, so ignore them.
+        ignore_test |= text.contains("extern crate proc_macro");
+        let should_have_no_error = text.contains("// check-pass")
+            || text.contains("// build-pass")
+            || text.contains("// run-pass");
+        change.change_file(self.root_file, Some(Arc::from(text)));
+        self.host.apply_change(change);
+        let diagnostic_config = DiagnosticsConfig::test_sample();
+        let diags = self
+            .host
+            .analysis()
+            .diagnostics(&diagnostic_config, ide::AssistResolveStrategy::None, self.root_file)
+            .unwrap();
+        let mut actual = HashMap::new();
+        for diag in diags {
+            if !matches!(diag.code, DiagnosticCode::RustcHardError(_)) {
+                continue;
+            }
+            if !should_have_no_error && !SUPPORTED_DIAGNOSTICS.contains(&diag.code) {
+                continue;
+            }
+            *actual.entry(diag.code).or_insert(0) += 1;
+        }
+        // Ignore tests with diagnostics that we don't emit.
+        ignore_test |= expected.keys().any(|k| !SUPPORTED_DIAGNOSTICS.contains(k));
+        if ignore_test {
+            println!("{p:?} IGNORE");
+            self.ignore_count += 1;
+        } else if actual == expected {
+            println!("{p:?} PASS");
+            self.pass_count += 1;
+        } else {
+            println!("{p:?} FAIL");
+            println!("actual (r-a) = {:?}", actual);
+            println!("expected (rustc) = {:?}", expected);
+            self.fail_count += 1;
+        }
+    }
+
+    fn report(&mut self) {
+        println!(
+            "Pass count = {}, Fail count = {}, Ignore count = {}",
+            self.pass_count, self.fail_count, self.ignore_count
+        );
+        println!("Testing time and memory = {}", self.stopwatch.elapsed());
+        report_metric("rustc failed tests", self.fail_count, "#");
+        report_metric("rustc testing time", self.stopwatch.elapsed().time.as_millis() as u64, "ms");
+    }
+}
+
+/// These tests break rust-analyzer (either by panicking or hanging) so we should ignore them.
+const IGNORED_TESTS: &[&str] = &[
+    "trait-with-missing-associated-type-restriction.rs", // #15646
+    "trait-with-missing-associated-type-restriction-fixable.rs", // #15646
+    "resolve-self-in-impl.rs",
+    "basic.rs", // ../rust/tests/ui/associated-type-bounds/return-type-notation/basic.rs
+    "issue-26056.rs",
+    "float-field.rs",
+    "invalid_operator_trait.rs",
+    "type-alias-impl-trait-assoc-dyn.rs",
+    "deeply-nested_closures.rs", // exponential time
+    "hang-on-deeply-nested-dyn.rs", // exponential time
+    "dyn-rpit-and-let.rs", // unexpected free variable with depth `^1.0` with outer binder ^0
+    "issue-16098.rs", // Huge recursion limit for macros?
+    "issue-83471.rs", // crates/hir-ty/src/builder.rs:78:9: assertion failed: self.remaining() > 0
+];
+
+const SUPPORTED_DIAGNOSTICS: &[DiagnosticCode] = &[
+    DiagnosticCode::RustcHardError("E0023"),
+    DiagnosticCode::RustcHardError("E0046"),
+    DiagnosticCode::RustcHardError("E0063"),
+    DiagnosticCode::RustcHardError("E0107"),
+    DiagnosticCode::RustcHardError("E0117"),
+    DiagnosticCode::RustcHardError("E0133"),
+    DiagnosticCode::RustcHardError("E0210"),
+    DiagnosticCode::RustcHardError("E0268"),
+    DiagnosticCode::RustcHardError("E0308"),
+    DiagnosticCode::RustcHardError("E0384"),
+    DiagnosticCode::RustcHardError("E0407"),
+    DiagnosticCode::RustcHardError("E0432"),
+    DiagnosticCode::RustcHardError("E0451"),
+    DiagnosticCode::RustcHardError("E0507"),
+    DiagnosticCode::RustcHardError("E0583"),
+    DiagnosticCode::RustcHardError("E0559"),
+    DiagnosticCode::RustcHardError("E0616"),
+    DiagnosticCode::RustcHardError("E0618"),
+    DiagnosticCode::RustcHardError("E0624"),
+    DiagnosticCode::RustcHardError("E0774"),
+    DiagnosticCode::RustcHardError("E0767"),
+    DiagnosticCode::RustcHardError("E0777"),
+];
+
+impl flags::RustcTests {
+    pub fn run(self) -> Result<()> {
+        let mut tester = Tester::new()?;
+        let walk_dir = WalkDir::new(self.rustc_repo.join("tests/ui"));
+        for i in walk_dir {
+            let i = i?;
+            let p = i.into_path();
+            if let Some(f) = &self.filter {
+                if !p.as_os_str().to_string_lossy().contains(f) {
+                    continue;
+                }
+            }
+            if p.extension().map_or(true, |x| x != "rs") {
+                continue;
+            }
+            if let Err(e) = std::panic::catch_unwind({
+                let tester = AssertUnwindSafe(&mut tester);
+                let p = p.clone();
+                move || {
+                    let tester = tester;
+                    tester.0.test(p);
+                }
+            }) {
+                println!("panic detected at test {:?}", p);
+                std::panic::resume_unwind(e);
+            }
+        }
+        tester.report();
+        Ok(())
+    }
+}
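The runner above derives its expected results by scanning rustc's checked-in `.stderr` files for `error[E` markers. A standalone sketch of that scanning loop, simplified to take the stderr text directly rather than a path (the function name is illustrative):

use std::collections::HashMap;

// Same scanning logic as detect_errors_from_rustc_stderr_file above:
// each "error[E" is followed by a four-digit code.
fn count_error_codes(mut text: &str) -> HashMap<String, usize> {
    let mut result = HashMap::new();
    while let Some(p) = text.find("error[E") {
        // Skip past "error[E" (7 bytes), then read the 4-digit code.
        text = &text[p + 7..];
        let code = format!("E{}", &text[..4]);
        *result.entry(code).or_insert(0) += 1;
    }
    result
}

fn main() {
    let stderr = "error[E0308]: mismatched types\n...\nerror[E0308]: mismatched types\n";
    let counts = count_error_codes(stderr);
    assert_eq!(counts["E0308"], 2);
}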
@@ -1354,6 +1354,7 @@ impl Config {
         }
     }

+    // FIXME: This should be an AbsolutePathBuf
     fn target_dir_from_config(&self) -> Option<PathBuf> {
         self.data.rust_analyzerTargetDir.as_ref().and_then(|target_dir| match target_dir {
             TargetDirectory::UseSubdirectory(yes) if *yes => {
@@ -12,8 +12,8 @@ use anyhow::Context;

 use ide::{
     AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange,
-    HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory,
-    Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
+    HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, RangeLimit,
+    ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
 };
 use ide_db::SymbolKind;
 use lsp_server::ErrorCode;
@@ -1409,7 +1409,7 @@ pub(crate) fn handle_inlay_hints(
     let inlay_hints_config = snap.config.inlay_hints();
     Ok(Some(
         snap.analysis
-            .inlay_hints(&inlay_hints_config, file_id, Some(range))?
+            .inlay_hints(&inlay_hints_config, file_id, Some(RangeLimit::Fixed(range)))?
             .into_iter()
             .map(|it| {
                 to_proto::inlay_hint(
@@ -1440,22 +1440,13 @@ pub(crate) fn handle_inlay_hints_resolve(
     anyhow::ensure!(snap.file_exists(file_id), "Invalid LSP resolve data");

     let line_index = snap.file_line_index(file_id)?;
-    let range = from_proto::text_range(
-        &line_index,
-        lsp_types::Range { start: original_hint.position, end: original_hint.position },
-    )?;
-    let range_start = range.start();
-    let range_end = range.end();
-    let large_range = TextRange::new(
-        range_start.checked_sub(1.into()).unwrap_or(range_start),
-        range_end.checked_add(1.into()).unwrap_or(range_end),
-    );
+    let hint_position = from_proto::offset(&line_index, original_hint.position)?;
     let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints();
     forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty();
     let resolve_hints = snap.analysis.inlay_hints(
         &forced_resolve_inlay_hints_config,
         file_id,
-        Some(large_range),
+        Some(RangeLimit::NearestParent(hint_position)),
     )?;

     let mut resolved_hints = resolve_hints
@@ -32,7 +32,10 @@ fn integrated_highlighting_benchmark() {
     let workspace_to_load = project_root();
     let file = "./crates/rust-analyzer/src/config.rs";

-    let cargo_config = CargoConfig::default();
+    let cargo_config = CargoConfig {
+        sysroot: Some(project_model::RustLibSource::Discover),
+        ..CargoConfig::default()
+    };
     let load_cargo_config = LoadCargoConfig {
         load_out_dirs_from_check: true,
         with_proc_macro_server: ProcMacroServerChoice::None,
@@ -85,7 +88,10 @@ fn integrated_completion_benchmark() {
     let workspace_to_load = project_root();
     let file = "./crates/hir/src/lib.rs";

-    let cargo_config = CargoConfig::default();
+    let cargo_config = CargoConfig {
+        sysroot: Some(project_model::RustLibSource::Discover),
+        ..CargoConfig::default()
+    };
     let load_cargo_config = LoadCargoConfig {
         load_out_dirs_from_check: true,
         with_proc_macro_server: ProcMacroServerChoice::None,
@@ -103,10 +109,46 @@ fn integrated_completion_benchmark() {
         vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
     };

+    // kick off parsing and index population
+
+    let completion_offset = {
+        let _it = stdx::timeit("change");
+        let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+        let completion_offset =
+            patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
+                + "sel".len();
+        let mut change = Change::new();
+        change.change_file(file_id, Some(Arc::from(text)));
+        host.apply_change(change);
+        completion_offset
+    };
+
     {
-        let _it = stdx::timeit("initial");
+        let _span = profile::cpu_span();
         let analysis = host.analysis();
-        analysis.highlight_as_html(file_id, false).unwrap();
+        let config = CompletionConfig {
+            enable_postfix_completions: true,
+            enable_imports_on_the_fly: true,
+            enable_self_on_the_fly: true,
+            enable_private_editable: true,
+            full_function_signatures: false,
+            callable: Some(CallableSnippets::FillArguments),
+            snippet_cap: SnippetCap::new(true),
+            insert_use: InsertUseConfig {
+                granularity: ImportGranularity::Crate,
+                prefix_kind: hir::PrefixKind::ByCrate,
+                enforce_granularity: true,
+                group: true,
+                skip_glob_imports: true,
+            },
+            snippets: Vec::new(),
+            prefer_no_std: false,
+            prefer_prelude: true,
+            limit: None,
+        };
+        let position =
+            FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+        analysis.completions(&config, position, None).unwrap();
     }

     profile::init_from("*>5");
@@ -116,8 +158,8 @@ fn integrated_completion_benchmark() {
         let _it = stdx::timeit("change");
         let mut text = host.analysis().file_text(file_id).unwrap().to_string();
         let completion_offset =
-            patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
-                + "sel".len();
+            patch(&mut text, "sel;\ndb.struct_data(self.id)", ";sel;\ndb.struct_data(self.id)")
+                + ";sel".len();
         let mut change = Change::new();
         change.change_file(file_id, Some(Arc::from(text)));
         host.apply_change(change);
@@ -275,10 +275,19 @@ impl ast::Path {
         successors(Some(self.clone()), ast::Path::qualifier).last().unwrap()
     }

+    pub fn first_qualifier(&self) -> Option<ast::Path> {
+        successors(self.qualifier(), ast::Path::qualifier).last()
+    }
+
     pub fn first_segment(&self) -> Option<ast::PathSegment> {
         self.first_qualifier_or_self().segment()
     }

+    // FIXME: Check usages of Self::segments, they might be wrong because of the logic of the bloew function
+    pub fn segments_of_this_path_only_rev(&self) -> impl Iterator<Item = ast::PathSegment> + Clone {
+        self.qualifiers_and_self().filter_map(|it| it.segment())
+    }
+
     pub fn segments(&self) -> impl Iterator<Item = ast::PathSegment> + Clone {
         successors(self.first_segment(), |p| {
             p.parent_path().parent_path().and_then(|p| p.segment())
@@ -289,6 +298,10 @@ impl ast::Path {
         successors(self.qualifier(), |p| p.qualifier())
     }

+    pub fn qualifiers_and_self(&self) -> impl Iterator<Item = ast::Path> + Clone {
+        successors(Some(self.clone()), |p| p.qualifier())
+    }
+
     pub fn top_path(&self) -> ast::Path {
         let mut this = self.clone();
         while let Some(path) = this.parent_path() {
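A sketch (not from the commit) of what the new `ast::Path` helpers yield, assuming the `syntax` crate with the methods added above and its usual `SourceFile::parse` entry point:

use syntax::{ast, AstNode, SourceFile};

fn main() {
    let file = SourceFile::parse("type T = foo::bar::Baz;").tree();
    let path = file.syntax().descendants().find_map(ast::Path::cast).unwrap();

    // qualifiers_and_self walks from the full path outward:
    // foo::bar::Baz, then foo::bar, then foo.
    let chain: Vec<String> =
        path.qualifiers_and_self().map(|p| p.syntax().text().to_string()).collect();
    assert_eq!(chain, ["foo::bar::Baz", "foo::bar", "foo"]);

    // first_qualifier is the outermost qualifier, here `foo`.
    assert_eq!(path.first_qualifier().unwrap().syntax().text().to_string(), "foo");
}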
@@ -76,9 +76,6 @@ pub trait HasDocComments: HasAttrs {
     fn doc_comments(&self) -> DocCommentIter {
         DocCommentIter { iter: self.syntax().children_with_tokens() }
     }
-    fn doc_comments_and_attrs(&self) -> AttrDocCommentIter {
-        AttrDocCommentIter { iter: self.syntax().children_with_tokens() }
-    }
 }

 impl DocCommentIter {
@@ -1,48 +1,8 @@
 //! A set of utils methods to reuse on other abstraction levels

-use itertools::Itertools;
-
-use crate::{ast, match_ast, AstNode, SyntaxKind};
+use crate::SyntaxKind;

-pub fn path_to_string_stripping_turbo_fish(path: &ast::Path) -> String {
-    path.syntax()
-        .children()
-        .filter_map(|node| {
-            match_ast! {
-                match node {
-                    ast::PathSegment(it) => {
-                        Some(it.name_ref()?.to_string())
-                    },
-                    ast::Path(it) => {
-                        Some(path_to_string_stripping_turbo_fish(&it))
-                    },
-                    _ => None,
-                }
-            }
-        })
-        .join("::")
-}
-
 pub fn is_raw_identifier(name: &str) -> bool {
     let is_keyword = SyntaxKind::from_keyword(name).is_some();
     is_keyword && !matches!(name, "self" | "crate" | "super" | "Self")
 }
-
-#[cfg(test)]
-mod tests {
-    use super::path_to_string_stripping_turbo_fish;
-    use crate::ast::make;
-
-    #[test]
-    fn turbofishes_are_stripped() {
-        assert_eq!("Vec", path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>")),);
-        assert_eq!(
-            "Vec::new",
-            path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>::new")),
-        );
-        assert_eq!(
-            "Vec::new",
-            path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::new()")),
-        );
-    }
-}
@@ -61,13 +61,17 @@ pub use paths::{AbsPath, AbsPathBuf};
 /// Most functions in rust-analyzer use this when they need to refer to a file.
 #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
 pub struct FileId(u32);
+// pub struct FileId(NonMaxU32);

 impl FileId {
     /// Think twice about using this outside of tests. If this ends up in a wrong place it will cause panics!
+    // FIXME: To be removed once we get rid of all `SpanData::DUMMY` usages.
     pub const BOGUS: FileId = FileId(0xe4e4e);
+    pub const MAX_FILE_ID: u32 = 0x7fff_ffff;
+
     #[inline]
-    pub fn from_raw(raw: u32) -> FileId {
+    pub const fn from_raw(raw: u32) -> FileId {
+        assert!(raw <= Self::MAX_FILE_ID);
         FileId(raw)
     }

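A simplified model (not the actual `vfs` source) of what the change above enforces: `from_raw` is now `const` and asserts the id stays below `MAX_FILE_ID`, leaving the top bit free for the niche optimization hinted at by the commented-out `NonMaxU32` line:

// Simplified model of the vfs change; assert! in const fn has been
// stable since Rust 1.57, so the check runs at compile time for consts.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct FileId(u32);

impl FileId {
    pub const MAX_FILE_ID: u32 = 0x7fff_ffff;

    pub const fn from_raw(raw: u32) -> FileId {
        assert!(raw <= Self::MAX_FILE_ID); // panics in const evaluation if violated
        FileId(raw)
    }
}

// Being const, it can now initialize constants directly:
const BOGUS: FileId = FileId::from_raw(0xe4e4e);

fn main() {
    assert_eq!(BOGUS, FileId(0xe4e4e));
}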
@@ -1,6 +1,6 @@
 [package]
 name = "line-index"
-version = "0.1.0"
+version = "0.1.1"
 description = "Maps flat `TextSize` offsets to/from `(line, column)` representation."
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/line-index"
@@ -110,6 +110,7 @@ pub struct PublishReleaseNotes {
 #[derive(Debug)]
 pub enum MeasurementType {
     Build,
+    RustcTests,
     AnalyzeSelf,
     AnalyzeRipgrep,
     AnalyzeWebRender,
@@ -122,6 +123,7 @@ impl FromStr for MeasurementType {
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         match s {
             "build" => Ok(Self::Build),
+            "rustc_tests" => Ok(Self::RustcTests),
             "self" => Ok(Self::AnalyzeSelf),
             "ripgrep-13.0.0" => Ok(Self::AnalyzeRipgrep),
             "webrender-2022" => Ok(Self::AnalyzeWebRender),
@@ -135,6 +137,7 @@ impl AsRef<str> for MeasurementType {
     fn as_ref(&self) -> &str {
         match self {
             Self::Build => "build",
+            Self::RustcTests => "rustc_tests",
             Self::AnalyzeSelf => "self",
             Self::AnalyzeRipgrep => "ripgrep-13.0.0",
             Self::AnalyzeWebRender => "webrender-2022",
@@ -36,6 +36,9 @@ impl flags::Metrics {
                 MeasurementType::Build => {
                     metrics.measure_build(sh)?;
                 }
+                MeasurementType::RustcTests => {
+                    metrics.measure_rustc_tests(sh)?;
+                }
                 MeasurementType::AnalyzeSelf => {
                     metrics.measure_analysis_stats_self(sh)?;
                 }
@@ -50,6 +53,7 @@ impl flags::Metrics {
             }
             None => {
                 metrics.measure_build(sh)?;
+                metrics.measure_rustc_tests(sh)?;
                 metrics.measure_analysis_stats_self(sh)?;
                 metrics.measure_analysis_stats(sh, MeasurementType::AnalyzeRipgrep.as_ref())?;
                 metrics.measure_analysis_stats(sh, MeasurementType::AnalyzeWebRender.as_ref())?;
@@ -78,6 +82,19 @@ impl Metrics {
         self.report("build", time.as_millis() as u64, "ms".into());
         Ok(())
     }

+    fn measure_rustc_tests(&mut self, sh: &Shell) -> anyhow::Result<()> {
+        eprintln!("\nMeasuring rustc tests");
+
+        cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust").run()?;
+
+        let output = cmd!(sh, "./target/release/rust-analyzer rustc-tests ./rust").read()?;
+        for (metric, value, unit) in parse_metrics(&output) {
+            self.report(metric, value, unit.into());
+        }
+        Ok(())
+    }
+
     fn measure_analysis_stats_self(&mut self, sh: &Shell) -> anyhow::Result<()> {
         self.measure_analysis_stats_path(sh, "self", ".")
     }