Merge commit '9b3d03408c66749d56466bb09baf2a7177deb6ce' into sync-from-ra

Laurențiu Nicola 2023-08-21 12:44:09 +03:00
parent 883f16d805
commit 30d8aa1bec
136 changed files with 3865 additions and 1451 deletions

Cargo.lock (generated)
View file

@@ -999,23 +999,23 @@ checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "lsp-server"
-version = "0.7.1"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3711e4d6f491dc9edc0f1df80e204f38206775ac92c1241e89b79229a850bc00"
dependencies = [
 "crossbeam-channel",
 "log",
 "lsp-types",
 "serde",
 "serde_json",
]

[[package]]
name = "lsp-server"
-version = "0.7.2"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72417faa455bfb4e5bf14b157d8e2ca2ed74b4e89b8cf42ea2d864825ae5c8a2"
dependencies = [
 "crossbeam-channel",
 "log",
 "lsp-types",
 "serde",
 "serde_json",
]

@@ -1555,7 +1555,7 @@ dependencies = [
 "ide-ssr",
 "itertools",
 "load-cargo",
-"lsp-server 0.7.1",
+"lsp-server 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "lsp-types",
 "mbe",
 "mimalloc",

View file

@@ -86,7 +86,7 @@ proc-macro-test = { path = "./crates/proc-macro-test" }
# In-tree crates that are published separately and follow semver. See lib/README.md
line-index = { version = "0.1.0-pre.1" }
la-arena = { version = "0.3.1" }
-lsp-server = { version = "0.7.1" }
+lsp-server = { version = "0.7.3" }

# non-local crates
smallvec = { version = "1.10.0", features = [
@@ -97,7 +97,8 @@ smallvec = { version = "1.10.0", features = [
smol_str = "0.2.0"
nohash-hasher = "0.2.0"
text-size = "1.1.0"
-serde = { version = "1.0.156", features = ["derive"] }
+# See https://github.com/serde-rs/serde/issues/2538#issuecomment-1684517372 for why we pin serde
+serde = { version = "1.0.156, < 1.0.172", features = ["derive"] }
serde_json = "1.0.96"
triomphe = { version = "0.1.8", default-features = false, features = ["std"] }
# can't upgrade due to dashmap depending on 0.12.3 currently
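The pinned requirement reads as "at least 1.0.156, but below 1.0.172". A minimal sketch of how such a compound requirement behaves, written against the semver crate (the crate choice here is illustrative, not part of this change):

use semver::{Version, VersionReq};

fn main() {
    // A bare "1.0.156" in Cargo has caret semantics, so the pin is equivalent to "^1.0.156, <1.0.172".
    let req = VersionReq::parse("^1.0.156, <1.0.172").unwrap();
    assert!(req.matches(&Version::parse("1.0.171").unwrap()));
    assert!(!req.matches(&Version::parse("1.0.172").unwrap()));
}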

View file

@@ -130,6 +130,7 @@ impl ChangeFixture {
        let mut default_crate_root: Option<FileId> = None;
        let mut default_target_data_layout: Option<String> = None;
        let mut default_cfg = CfgOptions::default();
+        let mut default_env = Env::new_for_test_fixture();

        let mut file_set = FileSet::default();
        let mut current_source_root_kind = SourceRootKind::Local;
@@ -200,6 +201,7 @@
                assert!(default_crate_root.is_none());
                default_crate_root = Some(file_id);
                default_cfg = meta.cfg;
+                default_env.extend(meta.env.iter().map(|(x, y)| (x.to_owned(), y.to_owned())));
                default_target_data_layout = meta.target_data_layout;
            }
@@ -220,7 +222,7 @@
                None,
                default_cfg,
                Default::default(),
-                Env::new_for_test_fixture(),
+                default_env,
                false,
                CrateOrigin::Local { repo: None, name: None },
                default_target_data_layout

View file

@@ -686,6 +686,12 @@ impl fmt::Display for Edition {
    }
}

+impl Extend<(String, String)> for Env {
+    fn extend<T: IntoIterator<Item = (String, String)>>(&mut self, iter: T) {
+        self.entries.extend(iter);
+    }
+}
+
impl FromIterator<(String, String)> for Env {
    fn from_iter<T: IntoIterator<Item = (String, String)>>(iter: T) -> Self {
        Env { entries: FromIterator::from_iter(iter) }
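A small illustration of what the new `Extend` impl enables in `ChangeFixture`: fixture-supplied variables can be merged into the default test env instead of replacing it. The `Env` stand-in below is only a sketch over a plain `HashMap`, not the real type, and `FIXTURE_VAR` is a made-up key:

use std::collections::HashMap;

#[derive(Default)]
struct Env {
    entries: HashMap<String, String>,
}

impl Extend<(String, String)> for Env {
    fn extend<T: IntoIterator<Item = (String, String)>>(&mut self, iter: T) {
        self.entries.extend(iter);
    }
}

fn main() {
    // Stands in for Env::new_for_test_fixture().
    let mut default_env = Env::default();
    let meta_env = [("FIXTURE_VAR", "1")];
    default_env.extend(meta_env.iter().map(|(k, v)| (k.to_string(), v.to_string())));
    assert_eq!(default_env.entries.get("FIXTURE_VAR").map(String::as_str), Some("1"));
}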

View file

@@ -431,12 +431,10 @@ impl AttrsWithOwner {
                    .item_tree(db)
                    .raw_attrs(AttrOwner::ModItem(definition_tree_id.value.into()))
                    .clone(),
-                ModuleOrigin::BlockExpr { block } => RawAttrs::from_attrs_owner(
-                    db.upcast(),
-                    InFile::new(block.file_id, block.to_node(db.upcast()))
-                        .as_ref()
-                        .map(|it| it as &dyn ast::HasAttrs),
-                ),
+                ModuleOrigin::BlockExpr { id, .. } => {
+                    let tree = db.block_item_tree_query(id);
+                    tree.raw_attrs(AttrOwner::TopLevel).clone()
+                }
            }
        }
        AttrDefId::FieldId(it) => {

View file

@@ -505,6 +505,9 @@ impl ExprCollector<'_> {
            let mut args = Vec::new();
            let mut arg_types = Vec::new();
            if let Some(pl) = e.param_list() {
+                let num_params = pl.params().count();
+                args.reserve_exact(num_params);
+                arg_types.reserve_exact(num_params);
                for param in pl.params() {
                    let pat = this.collect_pat_top(param.pat());
                    let type_ref =
@@ -1100,7 +1103,9 @@ impl ExprCollector<'_> {
                ast::Stmt::ExprStmt(es) => matches!(es.expr(), Some(ast::Expr::MacroExpr(_))),
                _ => false,
            });
-            statement_has_item || matches!(block.tail_expr(), Some(ast::Expr::MacroExpr(_)))
+            statement_has_item
+                || matches!(block.tail_expr(), Some(ast::Expr::MacroExpr(_)))
+                || (block.may_carry_attributes() && block.attrs().next().is_some())
        };

        let block_id = if block_has_items {

View file

@@ -38,9 +38,9 @@ fn outer() {
"#,
        expect![[r#"
            block scope
-            CrateStruct: t
-            PlainStruct: t v
-            SelfStruct: t
+            CrateStruct: ti
+            PlainStruct: ti vi
+            SelfStruct: ti
            Struct: v
            SuperStruct: _
@@ -66,7 +66,7 @@ fn outer() {
"#,
        expect![[r#"
            block scope
-            imported: t v
+            imported: ti vi
            name: v
            crate
@@ -92,9 +92,9 @@ fn outer() {
"#,
        expect![[r#"
            block scope
-            inner1: t
+            inner1: ti
            inner2: v
-            outer: v
+            outer: vi
            block scope
            inner: v
@@ -121,7 +121,7 @@ struct Struct {}
"#,
        expect![[r#"
            block scope
-            Struct: t
+            Struct: ti
            crate
            Struct: t
@@ -153,7 +153,7 @@ fn outer() {
"#,
        expect![[r#"
            block scope
-            ResolveMe: t
+            ResolveMe: ti
            block scope
            m2: t
@@ -214,7 +214,7 @@ fn f() {
"#,
        expect![[r#"
            block scope
-            ResolveMe: t
+            ResolveMe: ti
            block scope
            h: v
@@ -292,7 +292,7 @@ pub mod cov_mark {
            nested: v
            crate
-            cov_mark: t
+            cov_mark: ti
            f: v
        "#]],
    );
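In these expectations the new `i` suffix on `t`/`v` marks names that resolve through an import (an `e` suffix marks extern crates); this matches the extended scope-dump format introduced for `ItemScope` later in this diff.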

View file

@@ -487,7 +487,7 @@ impl ExternCrateDeclData {
            db.crate_def_map(loc.container.krate())
                .extern_prelude()
                .find(|&(prelude_name, ..)| *prelude_name == name)
-                .map(|(_, root)| root.krate())
+                .map(|(_, (root, _))| root.krate())
        };

        Arc::new(Self {

View file

@@ -82,6 +82,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
    #[salsa::invoke(ItemTree::file_item_tree_query)]
    fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;

+    #[salsa::invoke(ItemTree::block_item_tree_query)]
+    fn block_item_tree_query(&self, block_id: BlockId) -> Arc<ItemTree>;
+
    #[salsa::invoke(crate_def_map_wait)]
    #[salsa::transparent]
    fn crate_def_map(&self, krate: CrateId) -> Arc<DefMap>;

View file

@@ -11,7 +11,7 @@ use crate::{
    nameres::DefMap,
    path::{ModPath, PathKind},
    visibility::Visibility,
-    ModuleDefId, ModuleId,
+    CrateRootModuleId, ModuleDefId, ModuleId,
};

/// Find a path that can be used to refer to a certain item. This can depend on
@@ -81,7 +81,7 @@ fn find_path_inner(
    }

    let def_map = from.def_map(db);
-    let crate_root = def_map.crate_root().into();
+    let crate_root = def_map.crate_root();
    // - if the item is a module, jump straight to module search
    if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item {
        let mut visited_modules = FxHashSet::default();
@@ -149,7 +149,7 @@ fn find_path_for_module(
    db: &dyn DefDatabase,
    def_map: &DefMap,
    visited_modules: &mut FxHashSet<ModuleId>,
-    crate_root: ModuleId,
+    crate_root: CrateRootModuleId,
    from: ModuleId,
    module_id: ModuleId,
    max_len: usize,
@@ -183,7 +183,7 @@ fn find_path_for_module(
    // - if the item is the crate root of a dependency crate, return the name from the extern prelude
    let root_def_map = crate_root.def_map(db);
-    for (name, def_id) in root_def_map.extern_prelude() {
+    for (name, (def_id, _extern_crate)) in root_def_map.extern_prelude() {
        if module_id == def_id {
            let name = scope_name.unwrap_or_else(|| name.clone());
@@ -192,7 +192,7 @@ fn find_path_for_module(
                def_map[local_id]
                    .scope
                    .type_(&name)
-                    .filter(|&(id, _)| id != ModuleDefId::ModuleId(def_id))
+                    .filter(|&(id, _)| id != ModuleDefId::ModuleId(def_id.into()))
            })
            .is_some();
        let kind = if name_already_occupied_in_type_ns {
@@ -224,6 +224,7 @@ fn find_path_for_module(
    )
}

+// FIXME: Do we still need this now that we record import origins, and hence aliases?
fn find_in_scope(
    db: &dyn DefDatabase,
    def_map: &DefMap,
@@ -244,7 +245,7 @@ fn find_in_prelude(
    item: ItemInNs,
    from: ModuleId,
) -> Option<ModPath> {
-    let prelude_module = root_def_map.prelude()?;
+    let (prelude_module, _) = root_def_map.prelude()?;
    // Preludes in block DefMaps are ignored, only the crate DefMap is searched
    let prelude_def_map = prelude_module.def_map(db);
    let prelude_scope = &prelude_def_map[prelude_module.local_id].scope;
@@ -293,7 +294,7 @@ fn calculate_best_path(
    db: &dyn DefDatabase,
    def_map: &DefMap,
    visited_modules: &mut FxHashSet<ModuleId>,
-    crate_root: ModuleId,
+    crate_root: CrateRootModuleId,
    max_len: usize,
    item: ItemInNs,
    from: ModuleId,
@@ -346,6 +347,11 @@ fn calculate_best_path(
        let extern_paths = crate_graph[from.krate].dependencies.iter().filter_map(|dep| {
            let import_map = db.import_map(dep.crate_id);
            import_map.import_info_for(item).and_then(|info| {
+                if info.is_doc_hidden {
+                    // the item or import is `#[doc(hidden)]`, so skip it as it is in an external crate
+                    return None;
+                }
+
                // Determine best path for containing module and append last segment from `info`.
                // FIXME: we should guide this to look up the path locally, or from the same crate again?
                let mut path = find_path_for_module(
@@ -1293,4 +1299,65 @@ pub mod prelude {
            "None",
        );
    }
#[test]
fn different_crate_renamed_through_dep() {
check_found_path(
r#"
//- /main.rs crate:main deps:intermediate
$0
//- /intermediate.rs crate:intermediate deps:std
pub extern crate std as std_renamed;
//- /std.rs crate:std
pub struct S;
"#,
"intermediate::std_renamed::S",
"intermediate::std_renamed::S",
"intermediate::std_renamed::S",
"intermediate::std_renamed::S",
);
}
#[test]
fn different_crate_doc_hidden() {
check_found_path(
r#"
//- /main.rs crate:main deps:intermediate
$0
//- /intermediate.rs crate:intermediate deps:std
#[doc(hidden)]
pub extern crate std;
pub extern crate std as longer;
//- /std.rs crate:std
pub struct S;
"#,
"intermediate::longer::S",
"intermediate::longer::S",
"intermediate::longer::S",
"intermediate::longer::S",
);
}
#[test]
fn respect_doc_hidden() {
check_found_path(
r#"
//- /main.rs crate:main deps:std,lazy_static
$0
//- /lazy_static.rs crate:lazy_static deps:core
#[doc(hidden)]
pub use core::ops::Deref as __Deref;
//- /std.rs crate:std deps:core
pub use core::ops;
//- /core.rs crate:core
pub mod ops {
pub trait Deref {}
}
"#,
"std::ops::Deref",
"std::ops::Deref",
"std::ops::Deref",
"std::ops::Deref",
);
}
}

View file

@@ -21,10 +21,11 @@ use crate::{
    db::DefDatabase,
    dyn_map::{keys, DynMap},
    expander::Expander,
+    item_tree::{AttrOwner, ItemTree},
    lower::LowerCtx,
    nameres::{DefMap, MacroSubNs},
    src::{HasChildSource, HasSource},
-    type_ref::{LifetimeRef, TypeBound, TypeRef},
+    type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRef},
    AdtId, ConstParamId, GenericDefId, HasModule, LifetimeParamId, LocalLifetimeParamId,
    LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
};
@@ -48,7 +49,7 @@ pub struct LifetimeParamData {
pub struct ConstParamData {
    pub name: Name,
    pub ty: Interned<TypeRef>,
-    pub has_default: bool,
+    pub default: Option<ConstRef>,
}

#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
@@ -75,7 +76,7 @@ impl TypeOrConstParamData {
    pub fn has_default(&self) -> bool {
        match self {
            TypeOrConstParamData::TypeParamData(it) => it.default.is_some(),
-            TypeOrConstParamData::ConstParamData(it) => it.has_default,
+            TypeOrConstParamData::ConstParamData(it) => it.default.is_some(),
        }
    }
@@ -154,12 +155,58 @@ impl GenericParams {
        def: GenericDefId,
    ) -> Interned<GenericParams> {
        let _p = profile::span("generic_params_query");
let krate = def.module(db).krate;
let cfg_options = db.crate_graph();
let cfg_options = &cfg_options[krate].cfg_options;
// Returns the generic parameters that are enabled under the current `#[cfg]` options
let enabled_params = |params: &Interned<GenericParams>, item_tree: &ItemTree| {
let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options);
// In the common case, no parameters will by disabled by `#[cfg]` attributes.
// Therefore, make a first pass to check if all parameters are enabled and, if so,
// clone the `Interned<GenericParams>` instead of recreating an identical copy.
let all_type_or_consts_enabled =
params.type_or_consts.iter().all(|(idx, _)| enabled(idx.into()));
let all_lifetimes_enabled = params.lifetimes.iter().all(|(idx, _)| enabled(idx.into()));
if all_type_or_consts_enabled && all_lifetimes_enabled {
params.clone()
} else {
Interned::new(GenericParams {
type_or_consts: all_type_or_consts_enabled
.then(|| params.type_or_consts.clone())
.unwrap_or_else(|| {
params
.type_or_consts
.iter()
.filter_map(|(idx, param)| {
enabled(idx.into()).then(|| param.clone())
})
.collect()
}),
lifetimes: all_lifetimes_enabled
.then(|| params.lifetimes.clone())
.unwrap_or_else(|| {
params
.lifetimes
.iter()
.filter_map(|(idx, param)| {
enabled(idx.into()).then(|| param.clone())
})
.collect()
}),
where_predicates: params.where_predicates.clone(),
})
}
};
        macro_rules! id_to_generics {
            ($id:ident) => {{
                let id = $id.lookup(db).id;
                let tree = id.item_tree(db);
                let item = &tree[id.value];
-                item.generic_params.clone()
+                enabled_params(&item.generic_params, &tree)
            }};
        }
@@ -169,7 +216,8 @@
                let tree = loc.id.item_tree(db);
                let item = &tree[loc.id.value];

-                let mut generic_params = GenericParams::clone(&item.explicit_generic_params);
+                let enabled_params = enabled_params(&item.explicit_generic_params, &tree);
+                let mut generic_params = GenericParams::clone(&enabled_params);

                let module = loc.container.module(db);
                let func_data = db.function_data(id);
@@ -198,9 +246,14 @@
        }
    }

-    pub(crate) fn fill(&mut self, lower_ctx: &LowerCtx<'_>, node: &dyn HasGenericParams) {
+    pub(crate) fn fill(
+        &mut self,
+        lower_ctx: &LowerCtx<'_>,
+        node: &dyn HasGenericParams,
+        add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+    ) {
        if let Some(params) = node.generic_param_list() {
-            self.fill_params(lower_ctx, params)
+            self.fill_params(lower_ctx, params, add_param_attrs)
        }
        if let Some(where_clause) = node.where_clause() {
            self.fill_where_predicates(lower_ctx, where_clause);
@@ -218,7 +271,12 @@
        }
    }

-    fn fill_params(&mut self, lower_ctx: &LowerCtx<'_>, params: ast::GenericParamList) {
+    fn fill_params(
+        &mut self,
+        lower_ctx: &LowerCtx<'_>,
+        params: ast::GenericParamList,
+        mut add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+    ) {
        for type_or_const_param in params.type_or_const_params() {
            match type_or_const_param {
                ast::TypeOrConstParam::Type(type_param) => {
@@ -232,13 +290,14 @@
                        default,
                        provenance: TypeParamProvenance::TypeParamList,
                    };
-                    self.type_or_consts.alloc(param.into());
+                    let idx = self.type_or_consts.alloc(param.into());
                    let type_ref = TypeRef::Path(name.into());
                    self.fill_bounds(
                        lower_ctx,
                        type_param.type_bound_list(),
                        Either::Left(type_ref),
                    );
+                    add_param_attrs(idx.into(), ast::GenericParam::TypeParam(type_param));
                }
                ast::TypeOrConstParam::Const(const_param) => {
                    let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
@@ -248,9 +307,10 @@
                    let param = ConstParamData {
                        name,
                        ty: Interned::new(ty),
-                        has_default: const_param.default_val().is_some(),
+                        default: ConstRef::from_const_param(lower_ctx, &const_param),
                    };
-                    self.type_or_consts.alloc(param.into());
+                    let idx = self.type_or_consts.alloc(param.into());
+                    add_param_attrs(idx.into(), ast::GenericParam::ConstParam(const_param));
                }
            }
        }
@@ -258,13 +318,14 @@
            let name =
                lifetime_param.lifetime().map_or_else(Name::missing, |lt| Name::new_lifetime(&lt));
            let param = LifetimeParamData { name: name.clone() };
-            self.lifetimes.alloc(param);
+            let idx = self.lifetimes.alloc(param);
            let lifetime_ref = LifetimeRef::new_name(name);
            self.fill_bounds(
                lower_ctx,
                lifetime_param.type_bound_list(),
                Either::Right(lifetime_ref),
            );
+            add_param_attrs(idx.into(), ast::GenericParam::LifetimeParam(lifetime_param));
        }
    }
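The `enabled_params` closure introduced above follows a clone-on-filter pattern: first check whether every parameter survives `#[cfg]`, and reuse the interned list if so, rebuilding it only in the rare case where something is disabled. A self-contained sketch of the same idea, using `Arc<Vec<_>>` stand-ins rather than the real `Interned<GenericParams>` machinery:

use std::sync::Arc;

fn enabled_subset(params: &Arc<Vec<u32>>, enabled: impl Fn(&u32) -> bool) -> Arc<Vec<u32>> {
    if params.iter().all(|p| enabled(p)) {
        // Common case: everything enabled, share the existing allocation.
        Arc::clone(params)
    } else {
        // Rare case: rebuild with only the enabled entries.
        Arc::new(params.iter().copied().filter(|p| enabled(p)).collect())
    }
}

fn main() {
    let params = Arc::new(vec![1, 2, 3, 4]);
    let all = enabled_subset(&params, |_| true);
    assert!(Arc::ptr_eq(&params, &all));
    let some = enabled_subset(&params, |p| p % 2 == 0);
    assert_eq!(*some, vec![2, 4]);
}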

View file

@@ -393,6 +393,17 @@ impl ConstRef {
        Self::Scalar(LiteralConstRef::Unknown)
    }
pub(crate) fn from_const_param(
lower_ctx: &LowerCtx<'_>,
param: &ast::ConstParam,
) -> Option<Self> {
let default = param.default_val();
match default {
Some(_) => Some(Self::from_const_arg(lower_ctx, default)),
None => None,
}
}
    pub fn display<'a>(&'a self, db: &'a dyn ExpandDatabase) -> impl fmt::Display + 'a {
        struct Display<'a>(&'a dyn ExpandDatabase, &'a ConstRef);
        impl fmt::Display for Display<'_> {

View file

@@ -11,6 +11,7 @@ use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
use triomphe::Arc;

+use crate::item_scope::ImportOrExternCrate;
use crate::{
    db::DefDatabase, item_scope::ItemInNs, nameres::DefMap, visibility::Visibility, AssocItemId,
    ModuleDefId, ModuleId, TraitId,
@@ -29,6 +30,8 @@ pub struct ImportInfo {
    pub container: ModuleId,
    /// Whether the import is a trait associated item or not.
    pub is_trait_assoc_item: bool,
+    /// Whether this item is annotated with `#[doc(hidden)]`.
+    pub is_doc_hidden: bool,
}

/// A map from publicly exported items to its name.
@@ -109,23 +112,41 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
        });

        for (name, per_ns) in visible_items {
-            for item in per_ns.iter_items() {
+            for (item, import) in per_ns.iter_items() {
// FIXME: Not yet used, but will be once we handle doc(hidden) import sources
let attr_id = if let Some(import) = import {
match import {
ImportOrExternCrate::ExternCrate(id) => Some(id.into()),
ImportOrExternCrate::Import(id) => Some(id.import.into()),
}
} else {
match item {
ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(),
ItemInNs::Macros(id) => Some(id.into()),
}
};
let is_doc_hidden =
attr_id.map_or(false, |attr_id| db.attrs(attr_id).has_doc_hidden());
                let import_info = ImportInfo {
                    name: name.clone(),
                    container: module,
                    is_trait_assoc_item: false,
+                    is_doc_hidden,
                };

                match depth_map.entry(item) {
-                    Entry::Vacant(entry) => {
-                        entry.insert(depth);
-                    }
+                    Entry::Vacant(entry) => _ = entry.insert((depth, is_doc_hidden)),
                    Entry::Occupied(mut entry) => {
-                        if depth < *entry.get() {
-                            entry.insert(depth);
-                        } else {
+                        let &(occ_depth, occ_is_doc_hidden) = entry.get();
+                        // Prefer the one that is not doc(hidden),
+                        // Otherwise, if both have the same doc(hidden)-ness and the new path is shorter, prefer that one.
+                        let overwrite_entry = occ_is_doc_hidden && !is_doc_hidden
+                            || occ_is_doc_hidden == is_doc_hidden && depth < occ_depth;
+                        if !overwrite_entry {
                            continue;
                        }
+                        entry.insert((depth, is_doc_hidden));
                    }
                }
@@ -162,10 +183,10 @@ fn collect_trait_assoc_items(
    trait_import_info: &ImportInfo,
) {
    let _p = profile::span("collect_trait_assoc_items");
-    for (assoc_item_name, item) in &db.trait_data(tr).items {
+    for &(ref assoc_item_name, item) in &db.trait_data(tr).items {
        let module_def_id = match item {
-            AssocItemId::FunctionId(f) => ModuleDefId::from(*f),
-            AssocItemId::ConstId(c) => ModuleDefId::from(*c),
+            AssocItemId::FunctionId(f) => ModuleDefId::from(f),
+            AssocItemId::ConstId(c) => ModuleDefId::from(c),
            // cannot use associated type aliases directly: need a `<Struct as Trait>::TypeAlias`
            // qualifier, ergo no need to store it for imports in import_map
            AssocItemId::TypeAliasId(_) => {
@@ -183,6 +204,7 @@ fn collect_trait_assoc_items(
            container: trait_import_info.container,
            name: assoc_item_name.clone(),
            is_trait_assoc_item: true,
+            is_doc_hidden: db.attrs(item.into()).has_doc_hidden(),
        };
        map.insert(assoc_item, assoc_item_info);
    }
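The depth-map update above boils down to a small ordering rule: a candidate path replaces an existing one only if it is visible while the old one was `#[doc(hidden)]`, or if both are equally hidden and the new path is strictly shorter. A stand-alone sketch of that predicate (the function name is hypothetical):

fn overwrite_entry(occupied: (usize, bool), candidate: (usize, bool)) -> bool {
    let (occ_depth, occ_is_doc_hidden) = occupied;
    let (depth, is_doc_hidden) = candidate;
    // Same expression as in collect_import_map above.
    occ_is_doc_hidden && !is_doc_hidden
        || occ_is_doc_hidden == is_doc_hidden && depth < occ_depth
}

fn main() {
    // A visible path beats a hidden one even when it is longer.
    assert!(overwrite_entry((1, true), (3, false)));
    // With equal hidden-ness, only a strictly shorter path wins.
    assert!(overwrite_entry((3, false), (2, false)));
    assert!(!overwrite_entry((2, false), (3, false)));
}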

View file

@@ -6,6 +6,7 @@ use std::collections::hash_map::Entry;
use base_db::CrateId;
use hir_expand::{attrs::AttrId, db::ExpandDatabase, name::Name, AstId, MacroCallId};
use itertools::Itertools;
+use la_arena::Idx;
use once_cell::sync::Lazy;
use profile::Count;
use rustc_hash::{FxHashMap, FxHashSet};
@@ -15,16 +16,10 @@ use syntax::ast;
use crate::{
    db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId,
-    ExternCrateId, HasModule, ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
-    UseId,
+    ExternCrateId, HasModule, ImplId, LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId,
+    TraitId, UseId,
};

-#[derive(Copy, Clone, Debug)]
-pub(crate) enum ImportType {
-    Glob,
-    Named,
-}
-
#[derive(Debug, Default)]
pub struct PerNsGlobImports {
    types: FxHashSet<(LocalModuleId, Name)>,
@@ -32,15 +27,50 @@ pub struct PerNsGlobImports {
    macros: FxHashSet<(LocalModuleId, Name)>,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ImportOrExternCrate {
Import(ImportId),
ExternCrate(ExternCrateId),
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub(crate) enum ImportType {
Import(ImportId),
Glob(UseId),
ExternCrate(ExternCrateId),
}
impl ImportOrExternCrate {
pub fn into_import(self) -> Option<ImportId> {
match self {
ImportOrExternCrate::Import(it) => Some(it),
_ => None,
}
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ImportOrDef {
Import(ImportId),
ExternCrate(ExternCrateId),
Def(ModuleDefId),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ImportId {
pub import: UseId,
pub idx: Idx<ast::UseTree>,
}
#[derive(Debug, Default, PartialEq, Eq)]
pub struct ItemScope {
    _c: Count<Self>,

    /// Defs visible in this scope. This includes `declarations`, but also
-    /// imports.
-    types: FxHashMap<Name, (ModuleDefId, Visibility)>,
-    values: FxHashMap<Name, (ModuleDefId, Visibility)>,
-    macros: FxHashMap<Name, (MacroId, Visibility)>,
+    /// imports. The imports belong to this module and can be resolved by using them on
+    /// the `use_imports_*` fields.
+    types: FxHashMap<Name, (ModuleDefId, Visibility, Option<ImportOrExternCrate>)>,
+    values: FxHashMap<Name, (ModuleDefId, Visibility, Option<ImportId>)>,
+    macros: FxHashMap<Name, (MacroId, Visibility, Option<ImportId>)>,
    unresolved: FxHashSet<Name>,

    /// The defs declared in this scope. Each def has a single scope where it is
@@ -50,7 +80,14 @@ pub struct ItemScope {
    impls: Vec<ImplId>,
    unnamed_consts: Vec<ConstId>,
    /// Traits imported via `use Trait as _;`.
-    unnamed_trait_imports: FxHashMap<TraitId, Visibility>,
+    unnamed_trait_imports: FxHashMap<TraitId, (Visibility, Option<ImportId>)>,
+
+    // the resolutions of the imports of this scope
+    use_imports_types: FxHashMap<ImportOrExternCrate, ImportOrDef>,
+    use_imports_values: FxHashMap<ImportId, ImportOrDef>,
+    use_imports_macros: FxHashMap<ImportId, ImportOrDef>,
+
+    use_decls: Vec<UseId>,
    extern_crate_decls: Vec<ExternCrateId>,
    /// Macros visible in current module in legacy textual scope
    ///
@@ -82,7 +119,7 @@ struct DeriveMacroInvocation {
pub(crate) static BUILTIN_SCOPE: Lazy<FxHashMap<Name, PerNs>> = Lazy::new(|| {
    BuiltinType::ALL
        .iter()
-        .map(|(name, ty)| (name.clone(), PerNs::types((*ty).into(), Visibility::Public)))
+        .map(|(name, ty)| (name.clone(), PerNs::types((*ty).into(), Visibility::Public, None)))
        .collect()
});
@@ -105,11 +142,77 @@ impl ItemScope {
            .chain(self.values.keys())
            .chain(self.macros.keys())
            .chain(self.unresolved.iter())
-            .sorted()
            .unique()
+            .sorted()
            .map(move |name| (name, self.get(name)))
    }
pub fn imports(&self) -> impl Iterator<Item = ImportId> + '_ {
self.use_imports_types
.keys()
.copied()
.filter_map(ImportOrExternCrate::into_import)
.chain(self.use_imports_values.keys().copied())
.chain(self.use_imports_macros.keys().copied())
.unique()
.sorted()
}
pub fn fully_resolve_import(&self, db: &dyn DefDatabase, mut import: ImportId) -> PerNs {
let mut res = PerNs::none();
let mut def_map;
let mut scope = self;
while let Some(&m) = scope.use_imports_macros.get(&import) {
match m {
ImportOrDef::Import(i) => {
let module_id = i.import.lookup(db).container;
def_map = module_id.def_map(db);
scope = &def_map[module_id.local_id].scope;
import = i;
}
ImportOrDef::Def(ModuleDefId::MacroId(def)) => {
res.macros = Some((def, Visibility::Public, None));
break;
}
_ => break,
}
}
let mut scope = self;
while let Some(&m) = scope.use_imports_types.get(&ImportOrExternCrate::Import(import)) {
match m {
ImportOrDef::Import(i) => {
let module_id = i.import.lookup(db).container;
def_map = module_id.def_map(db);
scope = &def_map[module_id.local_id].scope;
import = i;
}
ImportOrDef::Def(def) => {
res.types = Some((def, Visibility::Public, None));
break;
}
_ => break,
}
}
let mut scope = self;
while let Some(&m) = scope.use_imports_values.get(&import) {
match m {
ImportOrDef::Import(i) => {
let module_id = i.import.lookup(db).container;
def_map = module_id.def_map(db);
scope = &def_map[module_id.local_id].scope;
import = i;
}
ImportOrDef::Def(def) => {
res.values = Some((def, Visibility::Public, None));
break;
}
_ => break,
}
}
res
}
    pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {
        self.declarations.iter().copied()
    }
@@ -121,8 +224,7 @@ impl ItemScope {
    }

    pub fn use_decls(&self) -> impl Iterator<Item = UseId> + ExactSizeIterator + '_ {
-        // FIXME: to be implemented
-        std::iter::empty()
+        self.use_decls.iter().copied()
    }

    pub fn impls(&self) -> impl Iterator<Item = ImplId> + ExactSizeIterator + '_ {
@@ -132,13 +234,13 @@ impl ItemScope {
    pub fn values(
        &self,
    ) -> impl Iterator<Item = (ModuleDefId, Visibility)> + ExactSizeIterator + '_ {
-        self.values.values().copied()
+        self.values.values().copied().map(|(a, b, _)| (a, b))
    }

-    pub fn types(
+    pub(crate) fn types(
        &self,
    ) -> impl Iterator<Item = (ModuleDefId, Visibility)> + ExactSizeIterator + '_ {
-        self.types.values().copied()
+        self.types.values().copied().map(|(def, vis, _)| (def, vis))
    }

    pub fn unnamed_consts(&self) -> impl Iterator<Item = ConstId> + '_ {
@@ -165,33 +267,55 @@ impl ItemScope {
    }

    pub(crate) fn type_(&self, name: &Name) -> Option<(ModuleDefId, Visibility)> {
-        self.types.get(name).copied()
+        self.types.get(name).copied().map(|(a, b, _)| (a, b))
    }

    /// XXX: this is O(N) rather than O(1), try to not introduce new usages.
    pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> {
-        let (def, mut iter) = match item {
-            ItemInNs::Macros(def) => {
-                return self.macros.iter().find_map(|(name, &(other_def, vis))| {
-                    (other_def == def).then_some((name, vis))
-                });
-            }
-            ItemInNs::Types(def) => (def, self.types.iter()),
-            ItemInNs::Values(def) => (def, self.values.iter()),
-        };
-        iter.find_map(|(name, &(other_def, vis))| (other_def == def).then_some((name, vis)))
+        match item {
+            ItemInNs::Macros(def) => self
+                .macros
+                .iter()
+                .find_map(|(name, &(other_def, vis, _))| (other_def == def).then_some((name, vis))),
+            ItemInNs::Types(def) => self
+                .types
+                .iter()
+                .find_map(|(name, &(other_def, vis, _))| (other_def == def).then_some((name, vis))),
+            ItemInNs::Values(def) => self
+                .values
+                .iter()
+                .find_map(|(name, &(other_def, vis, _))| (other_def == def).then_some((name, vis))),
+        }
    }

    pub(crate) fn traits(&self) -> impl Iterator<Item = TraitId> + '_ {
        self.types
            .values()
-            .filter_map(|&(def, _)| match def {
+            .filter_map(|&(def, _, _)| match def {
                ModuleDefId::TraitId(t) => Some(t),
                _ => None,
            })
            .chain(self.unnamed_trait_imports.keys().copied())
    }
pub(crate) fn resolutions(&self) -> impl Iterator<Item = (Option<Name>, PerNs)> + '_ {
self.entries().map(|(name, res)| (Some(name.clone()), res)).chain(
self.unnamed_trait_imports.iter().map(|(tr, (vis, i))| {
(
None,
PerNs::types(
ModuleDefId::TraitId(*tr),
*vis,
i.map(ImportOrExternCrate::Import),
),
)
}),
)
}
}
impl ItemScope {
    pub(crate) fn declare(&mut self, def: ModuleDefId) {
        self.declarations.push(def)
    }
@@ -277,12 +401,14 @@ impl ItemScope {
        })
    }

+    // FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope
    pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
-        self.unnamed_trait_imports.get(&tr).copied()
+        self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a)
    }

    pub(crate) fn push_unnamed_trait(&mut self, tr: TraitId, vis: Visibility) {
-        self.unnamed_trait_imports.insert(tr, vis);
+        // FIXME: import
+        self.unnamed_trait_imports.insert(tr, (vis, None));
    }

    pub(crate) fn push_res_with_import(
@@ -290,51 +416,187 @@ impl ItemScope {
        glob_imports: &mut PerNsGlobImports,
        lookup: (LocalModuleId, Name),
        def: PerNs,
-        def_import_type: ImportType,
+        import: Option<ImportType>,
    ) -> bool {
        let mut changed = false;
macro_rules! check_changed { // FIXME: Document and simplify this
(
$changed:ident,
( $this:ident / $def:ident ) . $field:ident,
$glob_imports:ident [ $lookup:ident ],
$def_import_type:ident
) => {{
if let Some(fld) = $def.$field {
let existing = $this.$field.entry($lookup.1.clone());
match existing {
Entry::Vacant(entry) => {
match $def_import_type {
ImportType::Glob => {
$glob_imports.$field.insert($lookup.clone());
}
ImportType::Named => {
$glob_imports.$field.remove(&$lookup);
}
}
entry.insert(fld); if let Some(mut fld) = def.types {
$changed = true; let existing = self.types.entry(lookup.1.clone());
match existing {
Entry::Vacant(entry) => {
match import {
Some(ImportType::Glob(_)) => {
glob_imports.types.insert(lookup.clone());
} }
Entry::Occupied(mut entry) _ => _ = glob_imports.types.remove(&lookup),
if matches!($def_import_type, ImportType::Named) => }
{ let import = match import {
if $glob_imports.$field.remove(&$lookup) { Some(ImportType::ExternCrate(extern_crate)) => {
cov_mark::hit!(import_shadowed); Some(ImportOrExternCrate::ExternCrate(extern_crate))
entry.insert(fld); }
$changed = true; Some(ImportType::Import(import)) => {
Some(ImportOrExternCrate::Import(import))
}
None | Some(ImportType::Glob(_)) => None,
};
let prev = std::mem::replace(&mut fld.2, import);
if let Some(import) = import {
self.use_imports_types.insert(
import,
match prev {
Some(ImportOrExternCrate::Import(import)) => {
ImportOrDef::Import(import)
}
Some(ImportOrExternCrate::ExternCrate(import)) => {
ImportOrDef::ExternCrate(import)
}
None => ImportOrDef::Def(fld.0),
},
);
}
entry.insert(fld);
changed = true;
}
Entry::Occupied(mut entry) if !matches!(import, Some(ImportType::Glob(..))) => {
if glob_imports.types.remove(&lookup) {
let import = match import {
Some(ImportType::ExternCrate(extern_crate)) => {
Some(ImportOrExternCrate::ExternCrate(extern_crate))
} }
Some(ImportType::Import(import)) => {
Some(ImportOrExternCrate::Import(import))
}
None | Some(ImportType::Glob(_)) => None,
};
let prev = std::mem::replace(&mut fld.2, import);
if let Some(import) = import {
self.use_imports_types.insert(
import,
match prev {
Some(ImportOrExternCrate::Import(import)) => {
ImportOrDef::Import(import)
}
Some(ImportOrExternCrate::ExternCrate(import)) => {
ImportOrDef::ExternCrate(import)
}
None => ImportOrDef::Def(fld.0),
},
);
} }
_ => {} cov_mark::hit!(import_shadowed);
entry.insert(fld);
changed = true;
} }
} }
}}; _ => {}
}
} }
check_changed!(changed, (self / def).types, glob_imports[lookup], def_import_type); if let Some(mut fld) = def.values {
check_changed!(changed, (self / def).values, glob_imports[lookup], def_import_type); let existing = self.values.entry(lookup.1.clone());
check_changed!(changed, (self / def).macros, glob_imports[lookup], def_import_type); match existing {
Entry::Vacant(entry) => {
match import {
Some(ImportType::Glob(_)) => {
glob_imports.values.insert(lookup.clone());
}
_ => _ = glob_imports.values.remove(&lookup),
}
let import = match import {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
let prev = std::mem::replace(&mut fld.2, import);
if let Some(import) = import {
self.use_imports_values.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
None => ImportOrDef::Def(fld.0),
},
);
}
entry.insert(fld);
changed = true;
}
Entry::Occupied(mut entry) if !matches!(import, Some(ImportType::Glob(..))) => {
if glob_imports.values.remove(&lookup) {
cov_mark::hit!(import_shadowed);
let import = match import {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
let prev = std::mem::replace(&mut fld.2, import);
if let Some(import) = import {
self.use_imports_values.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
None => ImportOrDef::Def(fld.0),
},
);
}
entry.insert(fld);
changed = true;
}
}
_ => {}
}
}
if let Some(mut fld) = def.macros {
let existing = self.macros.entry(lookup.1.clone());
match existing {
Entry::Vacant(entry) => {
match import {
Some(ImportType::Glob(_)) => {
glob_imports.macros.insert(lookup.clone());
}
_ => _ = glob_imports.macros.remove(&lookup),
}
let import = match import {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
let prev = std::mem::replace(&mut fld.2, import);
if let Some(import) = import {
self.use_imports_macros.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
None => ImportOrDef::Def(fld.0.into()),
},
);
}
entry.insert(fld);
changed = true;
}
Entry::Occupied(mut entry) if !matches!(import, Some(ImportType::Glob(..))) => {
if glob_imports.macros.remove(&lookup) {
cov_mark::hit!(import_shadowed);
let import = match import {
Some(ImportType::Import(import)) => Some(import),
_ => None,
};
let prev = std::mem::replace(&mut fld.2, import);
if let Some(import) = import {
self.use_imports_macros.insert(
import,
match prev {
Some(import) => ImportOrDef::Import(import),
None => ImportOrDef::Def(fld.0.into()),
},
);
}
entry.insert(fld);
changed = true;
}
}
_ => {}
}
}
        if def.is_none() && self.unresolved.insert(lookup.1) {
            changed = true;
@@ -343,27 +605,18 @@ impl ItemScope {
        changed
    }

-    pub(crate) fn resolutions(&self) -> impl Iterator<Item = (Option<Name>, PerNs)> + '_ {
-        self.entries().map(|(name, res)| (Some(name.clone()), res)).chain(
-            self.unnamed_trait_imports
-                .iter()
-                .map(|(tr, vis)| (None, PerNs::types(ModuleDefId::TraitId(*tr), *vis))),
-        )
-    }
-
    /// Marks everything that is not a procedural macro as private to `this_module`.
    pub(crate) fn censor_non_proc_macros(&mut self, this_module: ModuleId) {
        self.types
            .values_mut()
-            .chain(self.values.values_mut())
+            .map(|(def, vis, _)| (def, vis))
+            .chain(self.values.values_mut().map(|(def, vis, _)| (def, vis)))
            .map(|(_, v)| v)
-            .chain(self.unnamed_trait_imports.values_mut())
+            .chain(self.unnamed_trait_imports.values_mut().map(|(vis, _)| vis))
            .for_each(|vis| *vis = Visibility::Module(this_module));

-        for (mac, vis) in self.macros.values_mut() {
-            if let MacroId::ProcMacroId(_) = mac {
+        for (mac, vis, import) in self.macros.values_mut() {
+            if matches!(mac, MacroId::ProcMacroId(_) if import.is_none()) {
+                // FIXME: Technically this is insufficient since reexports of proc macros are also
+                // forbidden. Practically nobody does that.
                continue;
            }
@@ -382,14 +635,25 @@ impl ItemScope {
                name.map_or("_".to_string(), |name| name.display(db).to_string())
            );

-            if def.types.is_some() {
+            if let Some((.., i)) = def.types {
                buf.push_str(" t");
+                match i {
+                    Some(ImportOrExternCrate::Import(_)) => buf.push('i'),
+                    Some(ImportOrExternCrate::ExternCrate(_)) => buf.push('e'),
+                    None => (),
+                }
            }
-            if def.values.is_some() {
+            if let Some((.., i)) = def.values {
                buf.push_str(" v");
+                if i.is_some() {
+                    buf.push('i');
+                }
            }
-            if def.macros.is_some() {
+            if let Some((.., i)) = def.macros {
                buf.push_str(" m");
+                if i.is_some() {
+                    buf.push('i');
+                }
            }
            if def.is_none() {
                buf.push_str(" _");
@@ -415,10 +679,17 @@ impl ItemScope {
            attr_macros,
            derive_macros,
            extern_crate_decls,
+            use_decls,
+            use_imports_values,
+            use_imports_types,
+            use_imports_macros,
        } = self;
        types.shrink_to_fit();
        values.shrink_to_fit();
        macros.shrink_to_fit();
+        use_imports_types.shrink_to_fit();
+        use_imports_values.shrink_to_fit();
+        use_imports_macros.shrink_to_fit();
        unresolved.shrink_to_fit();
        declarations.shrink_to_fit();
        impls.shrink_to_fit();
@@ -428,32 +699,44 @@ impl ItemScope {
        attr_macros.shrink_to_fit();
        derive_macros.shrink_to_fit();
        extern_crate_decls.shrink_to_fit();
+        use_decls.shrink_to_fit();
    }
}
impl PerNs {
-    pub(crate) fn from_def(def: ModuleDefId, v: Visibility, has_constructor: bool) -> PerNs {
+    pub(crate) fn from_def(
+        def: ModuleDefId,
+        v: Visibility,
+        has_constructor: bool,
+        import: Option<ImportOrExternCrate>,
+    ) -> PerNs {
        match def {
-            ModuleDefId::ModuleId(_) => PerNs::types(def, v),
-            ModuleDefId::FunctionId(_) => PerNs::values(def, v),
+            ModuleDefId::ModuleId(_) => PerNs::types(def, v, import),
+            ModuleDefId::FunctionId(_) => {
+                PerNs::values(def, v, import.and_then(ImportOrExternCrate::into_import))
+            }
            ModuleDefId::AdtId(adt) => match adt {
-                AdtId::UnionId(_) => PerNs::types(def, v),
-                AdtId::EnumId(_) => PerNs::types(def, v),
+                AdtId::UnionId(_) => PerNs::types(def, v, import),
+                AdtId::EnumId(_) => PerNs::types(def, v, import),
                AdtId::StructId(_) => {
                    if has_constructor {
-                        PerNs::both(def, def, v)
+                        PerNs::both(def, def, v, import)
                    } else {
-                        PerNs::types(def, v)
+                        PerNs::types(def, v, import)
                    }
                }
            },
-            ModuleDefId::EnumVariantId(_) => PerNs::both(def, def, v),
-            ModuleDefId::ConstId(_) | ModuleDefId::StaticId(_) => PerNs::values(def, v),
-            ModuleDefId::TraitId(_) => PerNs::types(def, v),
-            ModuleDefId::TraitAliasId(_) => PerNs::types(def, v),
-            ModuleDefId::TypeAliasId(_) => PerNs::types(def, v),
-            ModuleDefId::BuiltinType(_) => PerNs::types(def, v),
-            ModuleDefId::MacroId(mac) => PerNs::macros(mac, v),
+            ModuleDefId::EnumVariantId(_) => PerNs::both(def, def, v, import),
+            ModuleDefId::ConstId(_) | ModuleDefId::StaticId(_) => {
+                PerNs::values(def, v, import.and_then(ImportOrExternCrate::into_import))
+            }
+            ModuleDefId::TraitId(_) => PerNs::types(def, v, import),
+            ModuleDefId::TraitAliasId(_) => PerNs::types(def, v, import),
+            ModuleDefId::TypeAliasId(_) => PerNs::types(def, v, import),
+            ModuleDefId::BuiltinType(_) => PerNs::types(def, v, import),
+            ModuleDefId::MacroId(mac) => {
+                PerNs::macros(mac, v, import.and_then(ImportOrExternCrate::into_import))
+            }
        }
    }
}

View file

@@ -64,11 +64,11 @@ use triomphe::Arc;
use crate::{
    attr::Attrs,
    db::DefDatabase,
-    generics::GenericParams,
+    generics::{GenericParams, LifetimeParamData, TypeOrConstParamData},
    path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind},
    type_ref::{Mutability, TraitRef, TypeBound, TypeRef},
    visibility::RawVisibility,
-    BlockId,
+    BlockId, Lookup,
};

#[derive(Copy, Clone, Eq, PartialEq)]
@@ -143,6 +143,16 @@ impl ItemTree {
        Arc::new(item_tree)
    }
pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
let loc = block.lookup(db);
let block = loc.ast_id.to_node(db.upcast());
let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
let mut item_tree = ctx.lower_block(&block);
item_tree.shrink_to_fit();
Arc::new(item_tree)
}
    /// Returns an iterator over all items located at the top level of the `HirFileId` this
    /// `ItemTree` was created from.
    pub fn top_level_items(&self) -> &[ModItem] {
@@ -178,13 +188,6 @@ impl ItemTree {
        self.data.get_or_insert_with(Box::default)
    }

-    fn block_item_tree(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
-        let loc = db.lookup_intern_block(block);
-        let block = loc.ast_id.to_node(db.upcast());
-        let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
-        Arc::new(ctx.lower_block(&block))
-    }
-
    fn shrink_to_fit(&mut self) {
        if let Some(data) = &mut self.data {
            let ItemTreeData {
@@ -296,10 +299,12 @@ pub enum AttrOwner {
    Variant(Idx<Variant>),
    Field(Idx<Field>),
    Param(Idx<Param>),
+    TypeOrConstParamData(Idx<TypeOrConstParamData>),
+    LifetimeParamData(Idx<LifetimeParamData>),
}

macro_rules! from_attrs {
-    ( $( $var:ident($t:ty) ),+ ) => {
+    ( $( $var:ident($t:ty) ),+ $(,)? ) => {
        $(
            impl From<$t> for AttrOwner {
                fn from(t: $t) -> AttrOwner {
@@ -310,7 +315,14 @@ macro_rules! from_attrs {
    };
}

-from_attrs!(ModItem(ModItem), Variant(Idx<Variant>), Field(Idx<Field>), Param(Idx<Param>));
+from_attrs!(
+    ModItem(ModItem),
+    Variant(Idx<Variant>),
+    Field(Idx<Field>),
+    Param(Idx<Param>),
+    TypeOrConstParamData(Idx<TypeOrConstParamData>),
+    LifetimeParamData(Idx<LifetimeParamData>),
+);

/// Trait implemented by all item nodes in the item tree.
pub trait ItemTreeNode: Clone {
@@ -373,7 +385,7 @@ impl TreeId {
    pub(crate) fn item_tree(&self, db: &dyn DefDatabase) -> Arc<ItemTree> {
        match self.block {
-            Some(block) => ItemTree::block_item_tree(db, block),
+            Some(block) => db.block_item_tree_query(block),
            None => db.file_item_tree(self.file),
        }
    }
@@ -761,6 +773,19 @@ impl Use {
            lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
        source_map[index].clone()
    }
/// Maps a `UseTree` contained in this import back to its AST node.
pub fn use_tree_source_map(
&self,
db: &dyn DefDatabase,
file_id: HirFileId,
) -> Arena<ast::UseTree> {
// Re-lower the AST item and get the source map.
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
let hygiene = Hygiene::new(db.upcast(), file_id);
lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree").1
}
}

#[derive(Clone, Copy, Debug, Eq, PartialEq)]
@@ -785,7 +810,7 @@ impl UseTree {
    fn expand_impl(
        &self,
        prefix: Option<ModPath>,
-        cb: &mut dyn FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
+        cb: &mut impl FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
    ) {
        fn concat_mod_paths(
            prefix: Option<ModPath>,

View file

@@ -77,6 +77,9 @@ impl<'a> Ctx<'a> {
    }

    pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
+        self.tree
+            .attrs
+            .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.hygiene()));
        self.tree.top_level = block
            .statements()
            .filter_map(|stmt| match stmt {
@@ -602,7 +605,21 @@ impl<'a> Ctx<'a> {
            generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param));
        }

-        generics.fill(&self.body_ctx, node);
+        let add_param_attrs = |item, param| {
let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.hygiene());
// This is identical to the body of `Ctx::add_attrs()` but we can't call that here
// because it requires `&mut self` and the call to `generics.fill()` below also
// references `self`.
match self.tree.attrs.entry(item) {
Entry::Occupied(mut entry) => {
*entry.get_mut() = entry.get().merge(attrs);
}
Entry::Vacant(entry) => {
entry.insert(attrs);
}
}
};
generics.fill(&self.body_ctx, node, add_param_attrs);
        generics.shrink_to_fit();
        Interned::new(generics)
@@ -763,7 +780,7 @@ impl UseTreeLowering<'_> {
    }
}

-pub(super) fn lower_use_tree(
+pub(crate) fn lower_use_tree(
    db: &dyn DefDatabase,
    hygiene: &Hygiene,
    tree: ast::UseTree,

View file

@@ -16,7 +16,7 @@ pub(super) fn print_item_tree(db: &dyn ExpandDatabase, tree: &ItemTree) -> Strin
    let mut p = Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true };

    if let Some(attrs) = tree.attrs.get(&AttrOwner::TopLevel) {
-        p.print_attrs(attrs, true);
+        p.print_attrs(attrs, true, "\n");
    }
    p.blank();
@@ -84,22 +84,23 @@ impl Printer<'_> {
        }
    }

-    fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool) {
+    fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool, separated_by: &str) {
        let inner = if inner { "!" } else { "" };
        for attr in &**attrs {
-            wln!(
+            w!(
                self,
-                "#{}[{}{}]",
+                "#{}[{}{}]{}",
                inner,
                attr.path.display(self.db),
                attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(),
+                separated_by,
            );
        }
    }

-    fn print_attrs_of(&mut self, of: impl Into<AttrOwner>) {
+    fn print_attrs_of(&mut self, of: impl Into<AttrOwner>, separated_by: &str) {
        if let Some(attrs) = self.tree.attrs.get(&of.into()) {
-            self.print_attrs(attrs, false);
+            self.print_attrs(attrs, false, separated_by);
        }
    }
@@ -118,7 +119,7 @@ impl Printer<'_> {
                self.indented(|this| {
                    for field in fields.clone() {
                        let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field];
-                        this.print_attrs_of(field);
+                        this.print_attrs_of(field, "\n");
                        this.print_visibility(*visibility);
                        w!(this, "{}: ", name.display(self.db));
                        this.print_type_ref(type_ref);
@@ -132,7 +133,7 @@
                self.indented(|this| {
                    for field in fields.clone() {
                        let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field];
-                        this.print_attrs_of(field);
+                        this.print_attrs_of(field, "\n");
                        this.print_visibility(*visibility);
                        w!(this, "{}: ", name.display(self.db));
                        this.print_type_ref(type_ref);
@@ -195,7 +196,7 @@
    }

    fn print_mod_item(&mut self, item: ModItem) {
-        self.print_attrs_of(item);
+        self.print_attrs_of(item, "\n");

        match item {
            ModItem::Use(it) => {
@@ -261,7 +262,7 @@
                if !params.is_empty() {
                    self.indented(|this| {
                        for param in params.clone() {
-                            this.print_attrs_of(param);
+                            this.print_attrs_of(param, "\n");
                            match &this.tree[param] {
                                Param::Normal(ty) => {
                                    if flags.contains(FnFlags::HAS_SELF_PARAM) {
@@ -319,7 +320,7 @@
                self.indented(|this| {
                    for variant in variants.clone() {
                        let Variant { name, fields, ast_id: _ } = &this.tree[variant];
-                        this.print_attrs_of(variant);
+                        this.print_attrs_of(variant, "\n");
                        w!(this, "{}", name.display(self.db));
                        this.print_fields(fields);
                        wln!(this, ",");
@@ -484,11 +485,12 @@
        w!(self, "<");
        let mut first = true;
-        for (_, lt) in params.lifetimes.iter() {
+        for (idx, lt) in params.lifetimes.iter() {
            if !first {
                w!(self, ", ");
            }
            first = false;
+            self.print_attrs_of(idx, " ");
            w!(self, "{}", lt.name.display(self.db));
        }
        for (idx, x) in params.type_or_consts.iter() {
@@ -496,6 +498,7 @@
                w!(self, ", ");
            }
            first = false;
+            self.print_attrs_of(idx, " ");
            match x {
                TypeOrConstParamData::TypeParamData(ty) => match &ty.name {
                    Some(name) => w!(self, "{}", name.display(self.db)),

View file

@ -358,3 +358,15 @@ trait Tr<'a, T: 'a>: Super where Self: for<'a> Tr<'a, T> {}
"#]], "#]],
) )
} }
#[test]
fn generics_with_attributes() {
check(
r#"
struct S<#[cfg(never)] T>;
"#,
expect![[r#"
pub(self) struct S<#[cfg(never)] T>;
"#]],
)
}
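An illustrative aside: a minimal, compilable sketch of the separator-parameterized attribute printing that the new expectation above exercises — item attributes keep their trailing newline, while attributes on generic parameters are emitted inline with a single space. The names here are invented; this is not the real Printer.

fn print_attrs(out: &mut String, attrs: &[&str], inner: bool, separated_by: &str) {
    // The caller picks what follows each attribute: "\n" for items, " " for generic params.
    let bang = if inner { "!" } else { "" };
    for attr in attrs {
        out.push_str(&format!("#{bang}[{attr}]{separated_by}"));
    }
}

fn main() {
    let mut out = String::from("pub(self) struct S<");
    print_attrs(&mut out, &["cfg(never)"], false, " ");
    out.push_str("T>;");
    assert_eq!(out, "pub(self) struct S<#[cfg(never)] T>;");
}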

View file

@ -109,6 +109,17 @@ impl CrateRootModuleId {
} }
} }
impl PartialEq<ModuleId> for CrateRootModuleId {
fn eq(&self, other: &ModuleId) -> bool {
other.block.is_none() && other.local_id == DefMap::ROOT && self.krate == other.krate
}
}
impl PartialEq<CrateRootModuleId> for ModuleId {
fn eq(&self, other: &CrateRootModuleId) -> bool {
other == self
}
}
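A cut-down, compilable model of the symmetric comparison the two impls above introduce (toy types, not the real hir-def ids), showing that a crate root and a plain module id can now be compared from either side:

#[derive(Clone, Copy, PartialEq, Eq)]
struct CrateId(u32);

#[derive(Clone, Copy)]
struct CrateRootModuleId { krate: CrateId }

#[derive(Clone, Copy)]
struct ModuleId { krate: CrateId, block: Option<u32>, local_id: u32 }

const ROOT: u32 = 0;

impl PartialEq<ModuleId> for CrateRootModuleId {
    fn eq(&self, other: &ModuleId) -> bool {
        other.block.is_none() && other.local_id == ROOT && self.krate == other.krate
    }
}
impl PartialEq<CrateRootModuleId> for ModuleId {
    fn eq(&self, other: &CrateRootModuleId) -> bool {
        other == self // defer to the impl above
    }
}

fn main() {
    let krate = CrateId(0);
    let root = CrateRootModuleId { krate };
    let module = ModuleId { krate, block: None, local_id: ROOT };
    assert!(root == module && module == root);
}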
impl From<CrateRootModuleId> for ModuleId { impl From<CrateRootModuleId> for ModuleId {
fn from(CrateRootModuleId { krate }: CrateRootModuleId) -> Self { fn from(CrateRootModuleId { krate }: CrateRootModuleId) -> Self {
ModuleId { krate, block: None, local_id: DefMap::ROOT } ModuleId { krate, block: None, local_id: DefMap::ROOT }
@ -854,14 +865,36 @@ impl_from!(
ConstId, ConstId,
FunctionId, FunctionId,
TraitId, TraitId,
TraitAliasId,
TypeAliasId, TypeAliasId,
MacroId(Macro2Id, MacroRulesId, ProcMacroId), MacroId(Macro2Id, MacroRulesId, ProcMacroId),
ImplId, ImplId,
GenericParamId, GenericParamId,
ExternCrateId ExternCrateId,
UseId
for AttrDefId for AttrDefId
); );
impl TryFrom<ModuleDefId> for AttrDefId {
type Error = ();
fn try_from(value: ModuleDefId) -> Result<Self, Self::Error> {
match value {
ModuleDefId::ModuleId(it) => Ok(it.into()),
ModuleDefId::FunctionId(it) => Ok(it.into()),
ModuleDefId::AdtId(it) => Ok(it.into()),
ModuleDefId::EnumVariantId(it) => Ok(it.into()),
ModuleDefId::ConstId(it) => Ok(it.into()),
ModuleDefId::StaticId(it) => Ok(it.into()),
ModuleDefId::TraitId(it) => Ok(it.into()),
ModuleDefId::TypeAliasId(it) => Ok(it.into()),
ModuleDefId::TraitAliasId(id) => Ok(id.into()),
ModuleDefId::MacroId(id) => Ok(id.into()),
ModuleDefId::BuiltinType(_) => Err(()),
}
}
}
impl From<ItemContainerId> for AttrDefId { impl From<ItemContainerId> for AttrDefId {
fn from(acid: ItemContainerId) -> Self { fn from(acid: ItemContainerId) -> Self {
match acid { match acid {
@ -872,6 +905,15 @@ impl From<ItemContainerId> for AttrDefId {
} }
} }
} }
impl From<AssocItemId> for AttrDefId {
fn from(assoc: AssocItemId) -> Self {
match assoc {
AssocItemId::FunctionId(it) => AttrDefId::FunctionId(it),
AssocItemId::ConstId(it) => AttrDefId::ConstId(it),
AssocItemId::TypeAliasId(it) => AttrDefId::TypeAliasId(it),
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum VariantId { pub enum VariantId {

View file

@ -238,7 +238,7 @@ fn main() {
/* error: expected expression */; /* error: expected expression */;
/* error: expected expression, expected COMMA */; /* error: expected expression, expected COMMA */;
/* error: expected expression */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]); /* error: expected expression */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]);
/* error: expected expression, expected expression */; /* error: expected expression, expected R_PAREN */;
::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(5), ::core::fmt::Display::fmt), ]); ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(5), ::core::fmt::Display::fmt), ]);
} }
"##]], "##]],

View file

@ -909,3 +909,68 @@ macro_rules! with_std {
"##]], "##]],
) )
} }
#[test]
fn eager_regression_15403() {
check(
r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
fn main() {
format_args /* +errors */ !("{}", line.1.);
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
fn main() {
/* error: expected field name or number *//* parse error: expected field name or number */
::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(line.1.), ::core::fmt::Display::fmt), ]);
}
"##]],
);
}
#[test]
fn eager_regression_154032() {
check(
r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
fn main() {
format_args /* +errors */ !("{}", &[0 2]);
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
fn main() {
/* error: expected COMMA, expected R_BRACK, expected COMMA, expected COMMA, expected expression, expected R_PAREN *//* parse error: expected COMMA */
/* parse error: expected R_BRACK */
/* parse error: expected COMMA */
/* parse error: expected COMMA */
/* parse error: expected expression */
/* parse error: expected R_PAREN */
/* parse error: expected R_PAREN */
/* parse error: expected expression, item or let statement */
/* parse error: expected expression, item or let statement */
/* parse error: expected expression, item or let statement */
/* parse error: expected expression, item or let statement */
/* parse error: expected expression, item or let statement */
::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(&[0 2]), ::core::fmt::Display::fmt), ]);
}
"##]],
);
}

View file

@ -131,7 +131,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
.as_call_id_with_errors(&db, krate, |path| { .as_call_id_with_errors(&db, krate, |path| {
resolver resolver
.resolve_path_as_macro(&db, &path, Some(MacroSubNs::Bang)) .resolve_path_as_macro(&db, &path, Some(MacroSubNs::Bang))
.map(|it| macro_id_to_def_id(&db, it)) .map(|(it, _)| macro_id_to_def_id(&db, it))
}) })
.unwrap(); .unwrap();
let macro_call_id = res.value.unwrap(); let macro_call_id = res.value.unwrap();

View file

@ -60,7 +60,7 @@ mod tests;
use std::{cmp::Ord, ops::Deref}; use std::{cmp::Ord, ops::Deref};
use base_db::{CrateId, Edition, FileId, ProcMacroKind}; use base_db::{CrateId, Edition, FileId, ProcMacroKind};
use hir_expand::{name::Name, HirFileId, InFile, MacroCallId, MacroDefId}; use hir_expand::{ast_id_map::FileAstId, name::Name, HirFileId, InFile, MacroCallId, MacroDefId};
use itertools::Itertools; use itertools::Itertools;
use la_arena::Arena; use la_arena::Arena;
use profile::Count; use profile::Count;
@ -77,8 +77,8 @@ use crate::{
path::ModPath, path::ModPath,
per_ns::PerNs, per_ns::PerNs,
visibility::Visibility, visibility::Visibility,
AstId, BlockId, BlockLoc, CrateRootModuleId, FunctionId, LocalModuleId, Lookup, MacroExpander, AstId, BlockId, BlockLoc, CrateRootModuleId, ExternCrateId, FunctionId, LocalModuleId, Lookup,
MacroId, ModuleId, ProcMacroId, MacroExpander, MacroId, ModuleId, ProcMacroId, UseId,
}; };
/// Contains the results of (early) name resolution. /// Contains the results of (early) name resolution.
@ -105,10 +105,11 @@ pub struct DefMap {
/// The prelude is empty for non-block DefMaps (unless `#[prelude_import]` was used, /// The prelude is empty for non-block DefMaps (unless `#[prelude_import]` was used,
/// but that attribute is nightly and when used in a block, it affects resolution globally /// but that attribute is nightly and when used in a block, it affects resolution globally
/// so we aren't handling this correctly anyways). /// so we aren't handling this correctly anyways).
prelude: Option<ModuleId>, prelude: Option<(ModuleId, Option<UseId>)>,
/// `macro_use` prelude that contains macros from `#[macro_use]`'d external crates. Note that /// `macro_use` prelude that contains macros from `#[macro_use]`'d external crates. Note that
/// this contains all kinds of macro, not just `macro_rules!` macro. /// this contains all kinds of macro, not just `macro_rules!` macro.
macro_use_prelude: FxHashMap<Name, MacroId>, /// ExternCrateId being None implies it being imported from the general prelude import.
macro_use_prelude: FxHashMap<Name, (MacroId, Option<ExternCrateId>)>,
/// Tracks which custom derives are in scope for an item, to allow resolution of derive helper /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
/// attributes. /// attributes.
@ -125,7 +126,7 @@ pub struct DefMap {
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
struct DefMapCrateData { struct DefMapCrateData {
/// The extern prelude which contains all root modules of external crates that are in scope. /// The extern prelude which contains all root modules of external crates that are in scope.
extern_prelude: FxHashMap<Name, CrateRootModuleId>, extern_prelude: FxHashMap<Name, (CrateRootModuleId, Option<ExternCrateId>)>,
/// Side table for resolving derive helpers. /// Side table for resolving derive helpers.
exported_derives: FxHashMap<MacroDefId, Box<[Name]>>, exported_derives: FxHashMap<MacroDefId, Box<[Name]>>,
@ -217,16 +218,17 @@ pub enum ModuleOrigin {
/// Note that non-inline modules, by definition, live inside non-macro file. /// Note that non-inline modules, by definition, live inside non-macro file.
File { File {
is_mod_rs: bool, is_mod_rs: bool,
declaration: AstId<ast::Module>, declaration: FileAstId<ast::Module>,
declaration_tree_id: ItemTreeId<Mod>, declaration_tree_id: ItemTreeId<Mod>,
definition: FileId, definition: FileId,
}, },
Inline { Inline {
definition_tree_id: ItemTreeId<Mod>, definition_tree_id: ItemTreeId<Mod>,
definition: AstId<ast::Module>, definition: FileAstId<ast::Module>,
}, },
/// Pseudo-module introduced by a block scope (contains only inner items). /// Pseudo-module introduced by a block scope (contains only inner items).
BlockExpr { BlockExpr {
id: BlockId,
block: AstId<ast::BlockExpr>, block: AstId<ast::BlockExpr>,
}, },
} }
@ -234,8 +236,12 @@ pub enum ModuleOrigin {
impl ModuleOrigin { impl ModuleOrigin {
pub fn declaration(&self) -> Option<AstId<ast::Module>> { pub fn declaration(&self) -> Option<AstId<ast::Module>> {
match self { match self {
ModuleOrigin::File { declaration: module, .. } &ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
| ModuleOrigin::Inline { definition: module, .. } => Some(*module), Some(AstId::new(declaration_tree_id.file_id(), declaration))
}
&ModuleOrigin::Inline { definition, definition_tree_id } => {
Some(AstId::new(definition_tree_id.file_id(), definition))
}
ModuleOrigin::CrateRoot { .. } | ModuleOrigin::BlockExpr { .. } => None, ModuleOrigin::CrateRoot { .. } | ModuleOrigin::BlockExpr { .. } => None,
} }
} }
@ -260,16 +266,17 @@ impl ModuleOrigin {
/// That is, a file or a `mod foo {}` with items. /// That is, a file or a `mod foo {}` with items.
fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> { fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
match self { match self {
ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => { &ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => {
let file_id = *definition; let sf = db.parse(definition).tree();
let sf = db.parse(file_id).tree(); InFile::new(definition.into(), ModuleSource::SourceFile(sf))
InFile::new(file_id.into(), ModuleSource::SourceFile(sf))
} }
ModuleOrigin::Inline { definition, .. } => InFile::new( &ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
definition.file_id, definition_tree_id.file_id(),
ModuleSource::Module(definition.to_node(db.upcast())), ModuleSource::Module(
AstId::new(definition_tree_id.file_id(), definition).to_node(db.upcast()),
),
), ),
ModuleOrigin::BlockExpr { block } => { ModuleOrigin::BlockExpr { block, .. } => {
InFile::new(block.file_id, ModuleSource::BlockExpr(block.to_node(db.upcast()))) InFile::new(block.file_id, ModuleSource::BlockExpr(block.to_node(db.upcast())))
} }
} }
@ -314,9 +321,7 @@ impl DefMap {
} }
pub(crate) fn block_def_map_query(db: &dyn DefDatabase, block_id: BlockId) -> Arc<DefMap> { pub(crate) fn block_def_map_query(db: &dyn DefDatabase, block_id: BlockId) -> Arc<DefMap> {
let block: BlockLoc = db.lookup_intern_block(block_id); let block: BlockLoc = block_id.lookup(db);
let tree_id = TreeId::new(block.ast_id.file_id, Some(block_id));
let parent_map = block.module.def_map(db); let parent_map = block.module.def_map(db);
let krate = block.module.krate; let krate = block.module.krate;
@ -325,8 +330,10 @@ impl DefMap {
// modules declared by blocks with items. At the moment, we don't use // modules declared by blocks with items. At the moment, we don't use
// this visibility for anything outside IDE, so that's probably OK. // this visibility for anything outside IDE, so that's probably OK.
let visibility = Visibility::Module(ModuleId { krate, local_id, block: None }); let visibility = Visibility::Module(ModuleId { krate, local_id, block: None });
let module_data = let module_data = ModuleData::new(
ModuleData::new(ModuleOrigin::BlockExpr { block: block.ast_id }, visibility); ModuleOrigin::BlockExpr { block: block.ast_id, id: block_id },
visibility,
);
let mut def_map = DefMap::empty(krate, parent_map.data.edition, module_data); let mut def_map = DefMap::empty(krate, parent_map.data.edition, module_data);
def_map.data = parent_map.data.clone(); def_map.data = parent_map.data.clone();
@ -338,7 +345,8 @@ impl DefMap {
}, },
}); });
let def_map = collector::collect_defs(db, def_map, tree_id); let def_map =
collector::collect_defs(db, def_map, TreeId::new(block.ast_id.file_id, Some(block_id)));
Arc::new(def_map) Arc::new(def_map)
} }
@ -427,15 +435,19 @@ impl DefMap {
self.block.map(|block| block.block) self.block.map(|block| block.block)
} }
pub(crate) fn prelude(&self) -> Option<ModuleId> { pub(crate) fn prelude(&self) -> Option<(ModuleId, Option<UseId>)> {
self.prelude self.prelude
} }
pub(crate) fn extern_prelude(&self) -> impl Iterator<Item = (&Name, ModuleId)> + '_ { pub(crate) fn extern_prelude(
self.data.extern_prelude.iter().map(|(name, &def)| (name, def.into())) &self,
) -> impl Iterator<Item = (&Name, (CrateRootModuleId, Option<ExternCrateId>))> + '_ {
self.data.extern_prelude.iter().map(|(name, &def)| (name, def))
} }
pub(crate) fn macro_use_prelude(&self) -> impl Iterator<Item = (&Name, MacroId)> + '_ { pub(crate) fn macro_use_prelude(
&self,
) -> impl Iterator<Item = (&Name, (MacroId, Option<ExternCrateId>))> + '_ {
self.macro_use_prelude.iter().map(|(name, &def)| (name, def)) self.macro_use_prelude.iter().map(|(name, &def)| (name, def))
} }
@ -638,8 +650,8 @@ impl ModuleData {
ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => { ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
definition.into() definition.into()
} }
ModuleOrigin::Inline { definition, .. } => definition.file_id, ModuleOrigin::Inline { definition_tree_id, .. } => definition_tree_id.file_id(),
ModuleOrigin::BlockExpr { block } => block.file_id, ModuleOrigin::BlockExpr { block, .. } => block.file_id,
} }
} }
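A side note on the `ModuleOrigin` change running through this file: instead of a full `AstId` (which carries its own file id), the `File` and `Inline` variants now keep only the file-local `FileAstId` and rebuild the absolute id from the item tree's file on demand. A toy, compilable model of that reconstruction, with invented types rather than the real hir ones:

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct FileId(u32);
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct FileAstId(u32); // index of the node within its own file

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct AstId { file_id: FileId, value: FileAstId }

// Stand-in for ItemTreeId<Mod>: it already knows which file the tree came from.
#[derive(Clone, Copy)]
struct ItemTreeId { file_id: FileId }
impl ItemTreeId {
    fn file_id(self) -> FileId { self.file_id }
}

// The Inline variant after the change: no duplicated FileId stored.
struct Inline { definition: FileAstId, definition_tree_id: ItemTreeId }
impl Inline {
    fn declaration(&self) -> AstId {
        AstId { file_id: self.definition_tree_id.file_id(), value: self.definition }
    }
}

fn main() {
    let origin = Inline {
        definition: FileAstId(7),
        definition_tree_id: ItemTreeId { file_id: FileId(1) },
    };
    assert_eq!(origin.declaration(), AstId { file_id: FileId(1), value: FileAstId(7) });
}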

View file

@ -33,7 +33,7 @@ use crate::{
attr_macro_as_call_id, attr_macro_as_call_id,
db::DefDatabase, db::DefDatabase,
derive_macro_as_call_id, derive_macro_as_call_id,
item_scope::{ImportType, PerNsGlobImports}, item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports},
item_tree::{ item_tree::{
self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode, self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode,
MacroCall, MacroDef, MacroRules, Mod, ModItem, ModKind, TreeId, MacroCall, MacroDef, MacroRules, Mod, ModItem, ModKind, TreeId,
@ -52,10 +52,10 @@ use crate::{
tt, tt,
visibility::{RawVisibility, Visibility}, visibility::{RawVisibility, Visibility},
AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId, AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId,
ExternBlockLoc, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern,
LocalModuleId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ItemContainerId, LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId,
ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, MacroRulesId, MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc,
TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseLoc, StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId, UseLoc,
}; };
static GLOB_RECURSION_LIMIT: Limit = Limit::new(100); static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
@ -146,8 +146,8 @@ impl PartialResolvedImport {
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, Eq, PartialEq)]
enum ImportSource { enum ImportSource {
Use { id: ItemTreeId<item_tree::Use>, use_tree: Idx<ast::UseTree> }, Use { use_tree: Idx<ast::UseTree>, id: UseId, is_prelude: bool, kind: ImportKind },
ExternCrate(ItemTreeId<item_tree::ExternCrate>), ExternCrate { id: ExternCrateId },
} }
#[derive(Debug, Eq, PartialEq)] #[derive(Debug, Eq, PartialEq)]
@ -155,54 +155,41 @@ struct Import {
path: ModPath, path: ModPath,
alias: Option<ImportAlias>, alias: Option<ImportAlias>,
visibility: RawVisibility, visibility: RawVisibility,
kind: ImportKind,
source: ImportSource, source: ImportSource,
is_prelude: bool,
is_macro_use: bool,
} }
impl Import { impl Import {
fn from_use( fn from_use(
db: &dyn DefDatabase,
krate: CrateId,
tree: &ItemTree, tree: &ItemTree,
id: ItemTreeId<item_tree::Use>, item_tree_id: ItemTreeId<item_tree::Use>,
id: UseId,
is_prelude: bool,
mut cb: impl FnMut(Self), mut cb: impl FnMut(Self),
) { ) {
let it = &tree[id.value]; let it = &tree[item_tree_id.value];
let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into());
let visibility = &tree[it.visibility]; let visibility = &tree[it.visibility];
let is_prelude = attrs.by_key("prelude_import").exists();
it.use_tree.expand(|idx, path, kind, alias| { it.use_tree.expand(|idx, path, kind, alias| {
cb(Self { cb(Self {
path, path,
alias, alias,
visibility: visibility.clone(), visibility: visibility.clone(),
kind, source: ImportSource::Use { use_tree: idx, id, is_prelude, kind },
is_prelude,
is_macro_use: false,
source: ImportSource::Use { id, use_tree: idx },
}); });
}); });
} }
fn from_extern_crate( fn from_extern_crate(
db: &dyn DefDatabase,
krate: CrateId,
tree: &ItemTree, tree: &ItemTree,
id: ItemTreeId<item_tree::ExternCrate>, item_tree_id: ItemTreeId<item_tree::ExternCrate>,
id: ExternCrateId,
) -> Self { ) -> Self {
let it = &tree[id.value]; let it = &tree[item_tree_id.value];
let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into());
let visibility = &tree[it.visibility]; let visibility = &tree[it.visibility];
Self { Self {
path: ModPath::from_segments(PathKind::Plain, iter::once(it.name.clone())), path: ModPath::from_segments(PathKind::Plain, iter::once(it.name.clone())),
alias: it.alias.clone(), alias: it.alias.clone(),
visibility: visibility.clone(), visibility: visibility.clone(),
kind: ImportKind::Plain, source: ImportSource::ExternCrate { id },
is_prelude: false,
is_macro_use: attrs.by_key("macro_use").exists(),
source: ImportSource::ExternCrate(id),
} }
} }
} }
@ -235,7 +222,7 @@ struct DefCollector<'a> {
db: &'a dyn DefDatabase, db: &'a dyn DefDatabase,
def_map: DefMap, def_map: DefMap,
deps: FxHashMap<Name, Dependency>, deps: FxHashMap<Name, Dependency>,
glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, Visibility)>>, glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, Visibility, UseId)>>,
unresolved_imports: Vec<ImportDirective>, unresolved_imports: Vec<ImportDirective>,
indeterminate_imports: Vec<ImportDirective>, indeterminate_imports: Vec<ImportDirective>,
unresolved_macros: Vec<MacroDirective>, unresolved_macros: Vec<MacroDirective>,
@ -280,7 +267,7 @@ impl DefCollector<'_> {
if dep.is_prelude() { if dep.is_prelude() {
crate_data crate_data
.extern_prelude .extern_prelude
.insert(name.clone(), CrateRootModuleId { krate: dep.crate_id }); .insert(name.clone(), (CrateRootModuleId { krate: dep.crate_id }, None));
} }
} }
@ -556,8 +543,12 @@ impl DefCollector<'_> {
self.def_map.resolve_path(self.db, DefMap::ROOT, &path, BuiltinShadowMode::Other, None); self.def_map.resolve_path(self.db, DefMap::ROOT, &path, BuiltinShadowMode::Other, None);
match per_ns.types { match per_ns.types {
Some((ModuleDefId::ModuleId(m), _)) => { Some((ModuleDefId::ModuleId(m), _, import)) => {
self.def_map.prelude = Some(m); // FIXME: This should specifically look for a glob import somehow and record that here
self.def_map.prelude = Some((
m,
import.and_then(ImportOrExternCrate::into_import).map(|it| it.import),
));
} }
types => { types => {
tracing::debug!( tracing::debug!(
@ -657,9 +648,9 @@ impl DefCollector<'_> {
self.def_map.modules[module_id].scope.declare(macro_.into()); self.def_map.modules[module_id].scope.declare(macro_.into());
self.update( self.update(
module_id, module_id,
&[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))], &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public, None))],
Visibility::Public, Visibility::Public,
ImportType::Named, None,
); );
} }
} }
@ -693,9 +684,9 @@ impl DefCollector<'_> {
self.def_map.modules[module_id].scope.declare(macro_.into()); self.def_map.modules[module_id].scope.declare(macro_.into());
self.update( self.update(
module_id, module_id,
&[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))], &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public, None))],
vis, vis,
ImportType::Named, None,
); );
} }
@ -708,9 +699,9 @@ impl DefCollector<'_> {
self.def_map.modules[module_id].scope.declare(macro_.into()); self.def_map.modules[module_id].scope.declare(macro_.into());
self.update( self.update(
module_id, module_id,
&[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))], &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public, None))],
Visibility::Public, Visibility::Public,
ImportType::Named, None,
); );
} }
@ -720,21 +711,29 @@ impl DefCollector<'_> {
/// Exported macros are just all macros in the root module scope. /// Exported macros are just all macros in the root module scope.
/// Note that it contains not only all `#[macro_export]` macros, but also all aliases /// Note that it contains not only all `#[macro_export]` macros, but also all aliases
/// created by `use` in the root module, ignoring the visibility of `use`. /// created by `use` in the root module, ignoring the visibility of `use`.
fn import_macros_from_extern_crate(&mut self, krate: CrateId, names: Option<Vec<Name>>) { fn import_macros_from_extern_crate(
&mut self,
krate: CrateId,
names: Option<Vec<Name>>,
extern_crate: Option<ExternCrateId>,
) {
let def_map = self.db.crate_def_map(krate); let def_map = self.db.crate_def_map(krate);
// `#[macro_use]` brings macros into macro_use prelude. Yes, even non-`macro_rules!` // `#[macro_use]` brings macros into macro_use prelude. Yes, even non-`macro_rules!`
// macros. // macros.
let root_scope = &def_map[DefMap::ROOT].scope; let root_scope = &def_map[DefMap::ROOT].scope;
if let Some(names) = names { match names {
for name in names { Some(names) => {
// FIXME: Report diagnostic on 404. for name in names {
if let Some(def) = root_scope.get(&name).take_macros() { // FIXME: Report diagnostic on 404.
self.def_map.macro_use_prelude.insert(name, def); if let Some(def) = root_scope.get(&name).take_macros() {
self.def_map.macro_use_prelude.insert(name, (def, extern_crate));
}
} }
} }
} else { None => {
for (name, def) in root_scope.macros() { for (name, def) in root_scope.macros() {
self.def_map.macro_use_prelude.insert(name.clone(), def); self.def_map.macro_use_prelude.insert(name.clone(), (def, extern_crate));
}
} }
} }
} }
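An illustrative model of what `import_macros_from_extern_crate` now records (invented names and ids): each macro pulled in by `#[macro_use]` keeps a handle to the extern crate that introduced it, with `None` reserved for macros arriving via the injected prelude.

use std::collections::HashMap;

type MacroId = u32;
type ExternCrateId = u32;

fn main() {
    // Toy macro_use prelude, mirroring FxHashMap<Name, (MacroId, Option<ExternCrateId>)>.
    let mut macro_use_prelude: HashMap<&str, (MacroId, Option<ExternCrateId>)> = HashMap::new();

    // `#[macro_use] extern crate log;` — say it is extern crate #0 and exports `info` (macro #17).
    macro_use_prelude.insert("info", (17, Some(0)));
    // A macro that reaches the prelude through the injected std prelude has no extern crate.
    macro_use_prelude.insert("panic", (3, None));

    // Plain name resolution still only needs the macro id ...
    let (def, source) = macro_use_prelude["info"];
    assert_eq!(def, 17);
    // ... while the extern crate is kept around so IDE features can point back at it.
    assert_eq!(source, Some(0));
}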
@ -771,48 +770,53 @@ impl DefCollector<'_> {
let _p = profile::span("resolve_import") let _p = profile::span("resolve_import")
.detail(|| format!("{}", import.path.display(self.db.upcast()))); .detail(|| format!("{}", import.path.display(self.db.upcast())));
tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition); tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
if matches!(import.source, ImportSource::ExternCrate { .. }) { match import.source {
let name = import ImportSource::ExternCrate { .. } => {
.path let name = import
.as_ident() .path
.expect("extern crate should have been desugared to one-element path"); .as_ident()
.expect("extern crate should have been desugared to one-element path");
let res = self.resolve_extern_crate(name); let res = self.resolve_extern_crate(name);
match res { match res {
Some(res) => { Some(res) => PartialResolvedImport::Resolved(PerNs::types(
PartialResolvedImport::Resolved(PerNs::types(res.into(), Visibility::Public)) res.into(),
} Visibility::Public,
None => PartialResolvedImport::Unresolved, None,
} )),
} else { None => PartialResolvedImport::Unresolved,
let res = self.def_map.resolve_path_fp_with_macro(
self.db,
ResolveMode::Import,
module_id,
&import.path,
BuiltinShadowMode::Module,
None, // An import may resolve to any kind of macro.
);
let def = res.resolved_def;
if res.reached_fixedpoint == ReachedFixedPoint::No || def.is_none() {
return PartialResolvedImport::Unresolved;
}
if let Some(krate) = res.krate {
if krate != self.def_map.krate {
return PartialResolvedImport::Resolved(
def.filter_visibility(|v| matches!(v, Visibility::Public)),
);
} }
} }
ImportSource::Use { .. } => {
let res = self.def_map.resolve_path_fp_with_macro(
self.db,
ResolveMode::Import,
module_id,
&import.path,
BuiltinShadowMode::Module,
None, // An import may resolve to any kind of macro.
);
// Check whether all namespaces are resolved. let def = res.resolved_def;
if def.is_full() { if res.reached_fixedpoint == ReachedFixedPoint::No || def.is_none() {
PartialResolvedImport::Resolved(def) return PartialResolvedImport::Unresolved;
} else { }
PartialResolvedImport::Indeterminate(def)
if let Some(krate) = res.krate {
if krate != self.def_map.krate {
return PartialResolvedImport::Resolved(
def.filter_visibility(|v| matches!(v, Visibility::Public)),
);
}
}
// Check whether all namespaces are resolved.
if def.is_full() {
PartialResolvedImport::Resolved(def)
} else {
PartialResolvedImport::Indeterminate(def)
}
} }
} }
} }
@ -837,8 +841,9 @@ impl DefCollector<'_> {
.resolve_visibility(self.db, module_id, &directive.import.visibility, false) .resolve_visibility(self.db, module_id, &directive.import.visibility, false)
.unwrap_or(Visibility::Public); .unwrap_or(Visibility::Public);
match import.kind { match import.source {
ImportKind::Plain | ImportKind::TypeOnly => { ImportSource::ExternCrate { .. }
| ImportSource::Use { kind: ImportKind::Plain | ImportKind::TypeOnly, .. } => {
let name = match &import.alias { let name = match &import.alias {
Some(ImportAlias::Alias(name)) => Some(name), Some(ImportAlias::Alias(name)) => Some(name),
Some(ImportAlias::Underscore) => None, Some(ImportAlias::Underscore) => None,
@ -851,40 +856,44 @@ impl DefCollector<'_> {
}, },
}; };
if import.kind == ImportKind::TypeOnly { let imp = match import.source {
def.values = None; // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
def.macros = None; ImportSource::ExternCrate { id, .. } => {
} if self.def_map.block.is_none() && module_id == DefMap::ROOT {
if let (Some(ModuleDefId::ModuleId(def)), Some(name)) =
(def.take_types(), name)
{
if let Ok(def) = def.try_into() {
Arc::get_mut(&mut self.def_map.data)
.unwrap()
.extern_prelude
.insert(name.clone(), (def, Some(id)));
}
}
}
ImportType::ExternCrate(id)
}
ImportSource::Use { kind, id, use_tree, .. } => {
if kind == ImportKind::TypeOnly {
def.values = None;
def.macros = None;
}
ImportType::Import(ImportId { import: id, idx: use_tree })
}
};
tracing::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); tracing::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
// extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658 self.update(module_id, &[(name.cloned(), def)], vis, Some(imp));
if matches!(import.source, ImportSource::ExternCrate { .. })
&& self.def_map.block.is_none()
&& module_id == DefMap::ROOT
{
if let (Some(ModuleDefId::ModuleId(def)), Some(name)) = (def.take_types(), name)
{
if let Ok(def) = def.try_into() {
Arc::get_mut(&mut self.def_map.data)
.unwrap()
.extern_prelude
.insert(name.clone(), def);
}
}
}
self.update(module_id, &[(name.cloned(), def)], vis, ImportType::Named);
} }
ImportKind::Glob => { ImportSource::Use { kind: ImportKind::Glob, id, .. } => {
tracing::debug!("glob import: {:?}", import); tracing::debug!("glob import: {:?}", import);
match def.take_types() { match def.take_types() {
Some(ModuleDefId::ModuleId(m)) => { Some(ModuleDefId::ModuleId(m)) => {
if import.is_prelude { if let ImportSource::Use { id, is_prelude: true, .. } = import.source {
// Note: This dodgily overrides the injected prelude. The rustc // Note: This dodgily overrides the injected prelude. The rustc
// implementation seems to work the same though. // implementation seems to work the same though.
cov_mark::hit!(std_prelude); cov_mark::hit!(std_prelude);
self.def_map.prelude = Some(m); self.def_map.prelude = Some((m, Some(id)));
} else if m.krate != self.def_map.krate { } else if m.krate != self.def_map.krate {
cov_mark::hit!(glob_across_crates); cov_mark::hit!(glob_across_crates);
// glob import from other crate => we can just import everything once // glob import from other crate => we can just import everything once
@ -901,7 +910,7 @@ impl DefCollector<'_> {
.filter(|(_, res)| !res.is_none()) .filter(|(_, res)| !res.is_none())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
self.update(module_id, &items, vis, ImportType::Glob); self.update(module_id, &items, vis, Some(ImportType::Glob(id)));
} else { } else {
// glob import from same crate => we do an initial // glob import from same crate => we do an initial
// import, and then need to propagate any further // import, and then need to propagate any further
@ -933,11 +942,11 @@ impl DefCollector<'_> {
.filter(|(_, res)| !res.is_none()) .filter(|(_, res)| !res.is_none())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
self.update(module_id, &items, vis, ImportType::Glob); self.update(module_id, &items, vis, Some(ImportType::Glob(id)));
// record the glob import in case we add further items // record the glob import in case we add further items
let glob = self.glob_imports.entry(m.local_id).or_default(); let glob = self.glob_imports.entry(m.local_id).or_default();
if !glob.iter().any(|(mid, _)| *mid == module_id) { if !glob.iter().any(|(mid, _, _)| *mid == module_id) {
glob.push((module_id, vis)); glob.push((module_id, vis, id));
} }
} }
} }
@ -959,11 +968,11 @@ impl DefCollector<'_> {
.map(|(local_id, variant_data)| { .map(|(local_id, variant_data)| {
let name = variant_data.name.clone(); let name = variant_data.name.clone();
let variant = EnumVariantId { parent: e, local_id }; let variant = EnumVariantId { parent: e, local_id };
let res = PerNs::both(variant.into(), variant.into(), vis); let res = PerNs::both(variant.into(), variant.into(), vis, None);
(Some(name), res) (Some(name), res)
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
self.update(module_id, &resolutions, vis, ImportType::Glob); self.update(module_id, &resolutions, vis, Some(ImportType::Glob(id)));
} }
Some(d) => { Some(d) => {
tracing::debug!("glob import {:?} from non-module/enum {:?}", import, d); tracing::debug!("glob import {:?} from non-module/enum {:?}", import, d);
@ -983,10 +992,10 @@ impl DefCollector<'_> {
resolutions: &[(Option<Name>, PerNs)], resolutions: &[(Option<Name>, PerNs)],
// Visibility this import will have // Visibility this import will have
vis: Visibility, vis: Visibility,
import_type: ImportType, import: Option<ImportType>,
) { ) {
self.db.unwind_if_cancelled(); self.db.unwind_if_cancelled();
self.update_recursive(module_id, resolutions, vis, import_type, 0) self.update_recursive(module_id, resolutions, vis, import, 0)
} }
fn update_recursive( fn update_recursive(
@ -997,7 +1006,7 @@ impl DefCollector<'_> {
// All resolutions are imported with this visibility; the visibilities in // All resolutions are imported with this visibility; the visibilities in
// the `PerNs` values are ignored and overwritten // the `PerNs` values are ignored and overwritten
vis: Visibility, vis: Visibility,
import_type: ImportType, import: Option<ImportType>,
depth: usize, depth: usize,
) { ) {
if GLOB_RECURSION_LIMIT.check(depth).is_err() { if GLOB_RECURSION_LIMIT.check(depth).is_err() {
@ -1014,7 +1023,7 @@ impl DefCollector<'_> {
&mut self.from_glob_import, &mut self.from_glob_import,
(module_id, name.clone()), (module_id, name.clone()),
res.with_visibility(vis), res.with_visibility(vis),
import_type, import,
); );
} }
None => { None => {
@ -1059,7 +1068,7 @@ impl DefCollector<'_> {
.get(&module_id) .get(&module_id)
.into_iter() .into_iter()
.flatten() .flatten()
.filter(|(glob_importing_module, _)| { .filter(|(glob_importing_module, _, _)| {
// we know all resolutions have the same visibility (`vis`), so we // we know all resolutions have the same visibility (`vis`), so we
// just need to check that once // just need to check that once
vis.is_visible_from_def_map(self.db, &self.def_map, *glob_importing_module) vis.is_visible_from_def_map(self.db, &self.def_map, *glob_importing_module)
@ -1067,12 +1076,12 @@ impl DefCollector<'_> {
.cloned() .cloned()
.collect::<Vec<_>>(); .collect::<Vec<_>>();
for (glob_importing_module, glob_import_vis) in glob_imports { for (glob_importing_module, glob_import_vis, use_) in glob_imports {
self.update_recursive( self.update_recursive(
glob_importing_module, glob_importing_module,
resolutions, resolutions,
glob_import_vis, glob_import_vis,
ImportType::Glob, Some(ImportType::Glob(use_)),
depth + 1, depth + 1,
); );
} }
@ -1460,31 +1469,34 @@ impl DefCollector<'_> {
// heuristic, but it works in practice. // heuristic, but it works in practice.
let mut diagnosed_extern_crates = FxHashSet::default(); let mut diagnosed_extern_crates = FxHashSet::default();
for directive in &self.unresolved_imports { for directive in &self.unresolved_imports {
if let ImportSource::ExternCrate(krate) = directive.import.source { if let ImportSource::ExternCrate { id } = directive.import.source {
let item_tree = krate.item_tree(self.db); let item_tree_id = id.lookup(self.db).id;
let extern_crate = &item_tree[krate.value]; let item_tree = item_tree_id.item_tree(self.db);
let extern_crate = &item_tree[item_tree_id.value];
diagnosed_extern_crates.insert(extern_crate.name.clone()); diagnosed_extern_crates.insert(extern_crate.name.clone());
self.def_map.diagnostics.push(DefDiagnostic::unresolved_extern_crate( self.def_map.diagnostics.push(DefDiagnostic::unresolved_extern_crate(
directive.module_id, directive.module_id,
InFile::new(krate.file_id(), extern_crate.ast_id), InFile::new(item_tree_id.file_id(), extern_crate.ast_id),
)); ));
} }
} }
for directive in &self.unresolved_imports { for directive in &self.unresolved_imports {
if let ImportSource::Use { id: import, use_tree } = directive.import.source { if let ImportSource::Use { use_tree, id, is_prelude: _, kind: _ } =
directive.import.source
{
if matches!( if matches!(
(directive.import.path.segments().first(), &directive.import.path.kind), (directive.import.path.segments().first(), &directive.import.path.kind),
(Some(krate), PathKind::Plain | PathKind::Abs) if diagnosed_extern_crates.contains(krate) (Some(krate), PathKind::Plain | PathKind::Abs) if diagnosed_extern_crates.contains(krate)
) { ) {
continue; continue;
} }
let item_tree_id = id.lookup(self.db).id;
self.def_map.diagnostics.push(DefDiagnostic::unresolved_import( self.def_map.diagnostics.push(DefDiagnostic::unresolved_import(
directive.module_id, directive.module_id,
import, item_tree_id,
use_tree, use_tree,
)); ));
} }
@ -1519,72 +1531,66 @@ impl ModCollector<'_, '_> {
self.def_collector.mod_dirs.insert(self.module_id, self.mod_dir.clone()); self.def_collector.mod_dirs.insert(self.module_id, self.mod_dir.clone());
// Prelude module is always considered to be `#[macro_use]`. // Prelude module is always considered to be `#[macro_use]`.
if let Some(prelude_module) = self.def_collector.def_map.prelude { if let Some((prelude_module, _use)) = self.def_collector.def_map.prelude {
if prelude_module.krate != krate && is_crate_root { if prelude_module.krate != krate && is_crate_root {
cov_mark::hit!(prelude_is_macro_use); cov_mark::hit!(prelude_is_macro_use);
self.def_collector.import_macros_from_extern_crate(prelude_module.krate, None); self.def_collector.import_macros_from_extern_crate(
prelude_module.krate,
None,
None,
);
} }
} }
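As a small demonstration of the special case above — treating the prelude crate as `#[macro_use]` is what lets its macros be used with no import at all:

fn main() {
    // `println!` lives in std and is reached through the macro_use'd prelude,
    // so neither a `use` nor `#[macro_use] extern crate std;` is required.
    println!("prelude macros are in scope by default");
}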
let db = self.def_collector.db;
let module_id = self.module_id;
let update_def =
|def_collector: &mut DefCollector<'_>, id, name: &Name, vis, has_constructor| {
def_collector.def_map.modules[module_id].scope.declare(id);
def_collector.update(
module_id,
&[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor, None))],
vis,
None,
)
};
let resolve_vis = |def_map: &DefMap, visibility| {
def_map
.resolve_visibility(db, module_id, visibility, false)
.unwrap_or(Visibility::Public)
};
// This should be processed eagerly instead of deferred to resolving. let mut process_mod_item = |item: ModItem| {
// `#[macro_use] extern crate` is hoisted to imports macros before collecting let attrs = self.item_tree.attrs(db, krate, item.into());
// any other items.
//
// If we're not at the crate root, `macro_use`d extern crates are an error so let's just
// ignore them.
if is_crate_root {
for &item in items {
if let ModItem::ExternCrate(id) = item {
self.process_macro_use_extern_crate(id);
}
}
}
for &item in items {
let attrs = self.item_tree.attrs(self.def_collector.db, krate, item.into());
if let Some(cfg) = attrs.cfg() { if let Some(cfg) = attrs.cfg() {
if !self.is_cfg_enabled(&cfg) { if !self.is_cfg_enabled(&cfg) {
self.emit_unconfigured_diagnostic(item, &cfg); self.emit_unconfigured_diagnostic(item, &cfg);
continue; return;
} }
} }
if let Err(()) = self.resolve_attributes(&attrs, item, container) { if let Err(()) = self.resolve_attributes(&attrs, item, container) {
// Do not process the item. It has at least one non-builtin attribute, so the // Do not process the item. It has at least one non-builtin attribute, so the
// fixed-point algorithm is required to resolve the rest of them. // fixed-point algorithm is required to resolve the rest of them.
continue; return;
} }
let db = self.def_collector.db; let module = self.def_collector.def_map.module_id(module_id);
let module = self.def_collector.def_map.module_id(self.module_id);
let def_map = &mut self.def_collector.def_map; let def_map = &mut self.def_collector.def_map;
let update_def =
|def_collector: &mut DefCollector<'_>, id, name: &Name, vis, has_constructor| {
def_collector.def_map.modules[self.module_id].scope.declare(id);
def_collector.update(
self.module_id,
&[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor))],
vis,
ImportType::Named,
)
};
let resolve_vis = |def_map: &DefMap, visibility| {
def_map
.resolve_visibility(db, self.module_id, visibility, false)
.unwrap_or(Visibility::Public)
};
match item { match item {
ModItem::Mod(m) => self.collect_module(m, &attrs), ModItem::Mod(m) => self.collect_module(m, &attrs),
ModItem::Use(import_id) => { ModItem::Use(item_tree_id) => {
let _import_id = let id = UseLoc {
UseLoc { container: module, id: ItemTreeId::new(self.tree_id, import_id) } container: module,
.intern(db); id: ItemTreeId::new(self.tree_id, item_tree_id),
}
.intern(db);
let is_prelude = attrs.by_key("prelude_import").exists();
Import::from_use( Import::from_use(
db,
krate,
self.item_tree, self.item_tree,
ItemTreeId::new(self.tree_id, import_id), ItemTreeId::new(self.tree_id, item_tree_id),
id,
is_prelude,
|import| { |import| {
self.def_collector.unresolved_imports.push(ImportDirective { self.def_collector.unresolved_imports.push(ImportDirective {
module_id: self.module_id, module_id: self.module_id,
@ -1594,22 +1600,29 @@ impl ModCollector<'_, '_> {
}, },
) )
} }
ModItem::ExternCrate(import_id) => { ModItem::ExternCrate(item_tree_id) => {
let extern_crate_id = ExternCrateLoc { let id = ExternCrateLoc {
container: module, container: module,
id: ItemTreeId::new(self.tree_id, import_id), id: ItemTreeId::new(self.tree_id, item_tree_id),
} }
.intern(db); .intern(db);
if is_crate_root {
self.process_macro_use_extern_crate(
item_tree_id,
id,
attrs.by_key("macro_use").attrs(),
);
}
self.def_collector.def_map.modules[self.module_id] self.def_collector.def_map.modules[self.module_id]
.scope .scope
.define_extern_crate_decl(extern_crate_id); .define_extern_crate_decl(id);
self.def_collector.unresolved_imports.push(ImportDirective { self.def_collector.unresolved_imports.push(ImportDirective {
module_id: self.module_id, module_id: self.module_id,
import: Import::from_extern_crate( import: Import::from_extern_crate(
db,
krate,
self.item_tree, self.item_tree,
ItemTreeId::new(self.tree_id, import_id), ItemTreeId::new(self.tree_id, item_tree_id),
id,
), ),
status: PartialResolvedImport::Unresolved, status: PartialResolvedImport::Unresolved,
}) })
@ -1768,21 +1781,34 @@ impl ModCollector<'_, '_> {
); );
} }
} }
};
// extern crates should be processed eagerly instead of deferred to resolving.
// `#[macro_use] extern crate` is hoisted to imports macros before collecting
// any other items.
if is_crate_root {
items
.iter()
.filter(|it| matches!(it, ModItem::ExternCrate(..)))
.copied()
.for_each(&mut process_mod_item);
items
.iter()
.filter(|it| !matches!(it, ModItem::ExternCrate(..)))
.copied()
.for_each(process_mod_item);
} else {
items.iter().copied().for_each(process_mod_item);
} }
} }
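The same two-pass ordering, reduced to a compilable toy (an invented ModItem, not the item-tree one): extern crates are collected first so that their `#[macro_use]` macros are already in scope when the remaining items are processed.

#[derive(Clone, Copy, PartialEq, Debug)]
enum ModItem { ExternCrate(u32), Use(u32), Fn(u32) }

fn main() {
    let items = [ModItem::Fn(0), ModItem::ExternCrate(1), ModItem::Use(2)];
    let mut order = Vec::new();
    let mut process_mod_item = |item: ModItem| order.push(item);

    // Pass 1: only the extern crates.
    items.iter()
        .filter(|it| matches!(it, ModItem::ExternCrate(..)))
        .copied()
        .for_each(&mut process_mod_item);
    // Pass 2: everything else.
    items.iter()
        .filter(|it| !matches!(it, ModItem::ExternCrate(..)))
        .copied()
        .for_each(process_mod_item);

    assert_eq!(order, [ModItem::ExternCrate(1), ModItem::Fn(0), ModItem::Use(2)]);
}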
fn process_macro_use_extern_crate(&mut self, extern_crate: FileItemTreeId<ExternCrate>) { fn process_macro_use_extern_crate<'a>(
&mut self,
extern_crate: FileItemTreeId<ExternCrate>,
extern_crate_id: ExternCrateId,
macro_use_attrs: impl Iterator<Item = &'a Attr>,
) {
let db = self.def_collector.db; let db = self.def_collector.db;
let attrs = self.item_tree.attrs(
db,
self.def_collector.def_map.krate,
ModItem::from(extern_crate).into(),
);
if let Some(cfg) = attrs.cfg() {
if !self.is_cfg_enabled(&cfg) {
return;
}
}
let target_crate = let target_crate =
match self.def_collector.resolve_extern_crate(&self.item_tree[extern_crate].name) { match self.def_collector.resolve_extern_crate(&self.item_tree[extern_crate].name) {
@ -1798,11 +1824,11 @@ impl ModCollector<'_, '_> {
let mut single_imports = Vec::new(); let mut single_imports = Vec::new();
let hygiene = Hygiene::new_unhygienic(); let hygiene = Hygiene::new_unhygienic();
for attr in attrs.by_key("macro_use").attrs() { for attr in macro_use_attrs {
let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else { let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else {
// `#[macro_use]` (without any paths) found, forget collected names and just import // `#[macro_use]` (without any paths) found, forget collected names and just import
// all visible macros. // all visible macros.
self.def_collector.import_macros_from_extern_crate(target_crate, None); self.def_collector.import_macros_from_extern_crate(target_crate, None, Some(extern_crate_id));
return; return;
}; };
for path in paths { for path in paths {
@ -1812,7 +1838,11 @@ impl ModCollector<'_, '_> {
} }
} }
self.def_collector.import_macros_from_extern_crate(target_crate, Some(single_imports)); self.def_collector.import_macros_from_extern_crate(
target_crate,
Some(single_imports),
Some(extern_crate_id),
);
} }
fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) { fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) {
@ -1824,7 +1854,7 @@ impl ModCollector<'_, '_> {
ModKind::Inline { items } => { ModKind::Inline { items } => {
let module_id = self.push_child_module( let module_id = self.push_child_module(
module.name.clone(), module.name.clone(),
AstId::new(self.file_id(), module.ast_id), module.ast_id,
None, None,
&self.item_tree[module.visibility], &self.item_tree[module.visibility],
module_id, module_id,
@ -1862,7 +1892,7 @@ impl ModCollector<'_, '_> {
if is_enabled { if is_enabled {
let module_id = self.push_child_module( let module_id = self.push_child_module(
module.name.clone(), module.name.clone(),
ast_id, ast_id.value,
Some((file_id, is_mod_rs)), Some((file_id, is_mod_rs)),
&self.item_tree[module.visibility], &self.item_tree[module.visibility],
module_id, module_id,
@ -1889,7 +1919,7 @@ impl ModCollector<'_, '_> {
Err(candidates) => { Err(candidates) => {
self.push_child_module( self.push_child_module(
module.name.clone(), module.name.clone(),
ast_id, ast_id.value,
None, None,
&self.item_tree[module.visibility], &self.item_tree[module.visibility],
module_id, module_id,
@ -1906,7 +1936,7 @@ impl ModCollector<'_, '_> {
fn push_child_module( fn push_child_module(
&mut self, &mut self,
name: Name, name: Name,
declaration: AstId<ast::Module>, declaration: FileAstId<ast::Module>,
definition: Option<(FileId, bool)>, definition: Option<(FileId, bool)>,
visibility: &crate::visibility::RawVisibility, visibility: &crate::visibility::RawVisibility,
mod_tree_id: FileItemTreeId<Mod>, mod_tree_id: FileItemTreeId<Mod>,
@ -1948,9 +1978,9 @@ impl ModCollector<'_, '_> {
def_map.modules[self.module_id].scope.declare(def); def_map.modules[self.module_id].scope.declare(def);
self.def_collector.update( self.def_collector.update(
self.module_id, self.module_id,
&[(Some(name), PerNs::from_def(def, vis, false))], &[(Some(name), PerNs::from_def(def, vis, false, None))],
vis, vis,
ImportType::Named, None,
); );
res res
} }
@ -2198,7 +2228,7 @@ impl ModCollector<'_, '_> {
map[module].scope.get_legacy_macro(name)?.last().copied() map[module].scope.get_legacy_macro(name)?.last().copied()
}) })
.or_else(|| def_map[self.module_id].scope.get(name).take_macros()) .or_else(|| def_map[self.module_id].scope.get(name).take_macros())
.or_else(|| def_map.macro_use_prelude.get(name).copied()) .or_else(|| Some(def_map.macro_use_prelude.get(name).copied()?.0))
.filter(|&id| { .filter(|&id| {
sub_namespace_match( sub_namespace_match(
Some(MacroSubNs::from_id(db, id)), Some(MacroSubNs::from_id(db, id)),

View file

@ -15,8 +15,9 @@ use hir_expand::name::Name;
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
data::adt::VariantData,
db::DefDatabase, db::DefDatabase,
item_scope::BUILTIN_SCOPE, item_scope::{ImportOrExternCrate, BUILTIN_SCOPE},
nameres::{sub_namespace_match, BlockInfo, BuiltinShadowMode, DefMap, MacroSubNs}, nameres::{sub_namespace_match, BlockInfo, BuiltinShadowMode, DefMap, MacroSubNs},
path::{ModPath, PathKind}, path::{ModPath, PathKind},
per_ns::PerNs, per_ns::PerNs,
@ -65,7 +66,7 @@ impl PerNs {
db: &dyn DefDatabase, db: &dyn DefDatabase,
expected: Option<MacroSubNs>, expected: Option<MacroSubNs>,
) -> Self { ) -> Self {
self.macros = self.macros.filter(|&(id, _)| { self.macros = self.macros.filter(|&(id, _, _)| {
let this = MacroSubNs::from_id(db, id); let this = MacroSubNs::from_id(db, id);
sub_namespace_match(Some(this), expected) sub_namespace_match(Some(this), expected)
}); });
@ -196,15 +197,15 @@ impl DefMap {
PathKind::DollarCrate(krate) => { PathKind::DollarCrate(krate) => {
if krate == self.krate { if krate == self.krate {
cov_mark::hit!(macro_dollar_crate_self); cov_mark::hit!(macro_dollar_crate_self);
PerNs::types(self.crate_root().into(), Visibility::Public) PerNs::types(self.crate_root().into(), Visibility::Public, None)
} else { } else {
let def_map = db.crate_def_map(krate); let def_map = db.crate_def_map(krate);
let module = def_map.module_id(Self::ROOT); let module = def_map.module_id(Self::ROOT);
cov_mark::hit!(macro_dollar_crate_other); cov_mark::hit!(macro_dollar_crate_other);
PerNs::types(module.into(), Visibility::Public) PerNs::types(module.into(), Visibility::Public, None)
} }
} }
PathKind::Crate => PerNs::types(self.crate_root().into(), Visibility::Public), PathKind::Crate => PerNs::types(self.crate_root().into(), Visibility::Public, None),
// plain import or absolute path in 2015: crate-relative with // plain import or absolute path in 2015: crate-relative with
// fallback to extern prelude (with the simplification in // fallback to extern prelude (with the simplification in
// rust-lang/rust#57745) // rust-lang/rust#57745)
@ -291,7 +292,7 @@ impl DefMap {
); );
} }
PerNs::types(module.into(), Visibility::Public) PerNs::types(module.into(), Visibility::Public, None)
} }
PathKind::Abs => { PathKind::Abs => {
// 2018-style absolute path -- only extern prelude // 2018-style absolute path -- only extern prelude
@ -299,9 +300,13 @@ impl DefMap {
Some((_, segment)) => segment, Some((_, segment)) => segment,
None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
}; };
if let Some(&def) = self.data.extern_prelude.get(segment) { if let Some(&(def, extern_crate)) = self.data.extern_prelude.get(segment) {
tracing::debug!("absolute path {:?} resolved to crate {:?}", path, def); tracing::debug!("absolute path {:?} resolved to crate {:?}", path, def);
PerNs::types(def.into(), Visibility::Public) PerNs::types(
def.into(),
Visibility::Public,
extern_crate.map(ImportOrExternCrate::ExternCrate),
)
} else { } else {
return ResolvePathResult::empty(ReachedFixedPoint::No); // extern crate declarations can add to the extern prelude return ResolvePathResult::empty(ReachedFixedPoint::No); // extern crate declarations can add to the extern prelude
} }
@ -309,7 +314,7 @@ impl DefMap {
}; };
for (i, segment) in segments { for (i, segment) in segments {
let (curr, vis) = match curr_per_ns.take_types_vis() { let (curr, vis, imp) = match curr_per_ns.take_types_full() {
Some(r) => r, Some(r) => r,
None => { None => {
// we still have path segments left, but the path so far // we still have path segments left, but the path so far
@ -364,18 +369,20 @@ impl DefMap {
Some(local_id) => { Some(local_id) => {
let variant = EnumVariantId { parent: e, local_id }; let variant = EnumVariantId { parent: e, local_id };
match &*enum_data.variants[local_id].variant_data { match &*enum_data.variants[local_id].variant_data {
crate::data::adt::VariantData::Record(_) => { VariantData::Record(_) => {
PerNs::types(variant.into(), Visibility::Public) PerNs::types(variant.into(), Visibility::Public, None)
}
crate::data::adt::VariantData::Tuple(_)
| crate::data::adt::VariantData::Unit => {
PerNs::both(variant.into(), variant.into(), Visibility::Public)
} }
VariantData::Tuple(_) | VariantData::Unit => PerNs::both(
variant.into(),
variant.into(),
Visibility::Public,
None,
),
} }
} }
None => { None => {
return ResolvePathResult::with( return ResolvePathResult::with(
PerNs::types(e.into(), vis), PerNs::types(e.into(), vis, imp),
ReachedFixedPoint::Yes, ReachedFixedPoint::Yes,
Some(i), Some(i),
Some(self.krate), Some(self.krate),
@ -393,7 +400,7 @@ impl DefMap {
); );
return ResolvePathResult::with( return ResolvePathResult::with(
PerNs::types(s, vis), PerNs::types(s, vis, imp),
ReachedFixedPoint::Yes, ReachedFixedPoint::Yes,
Some(i), Some(i),
Some(self.krate), Some(self.krate),
@ -430,7 +437,7 @@ impl DefMap {
.filter(|&id| { .filter(|&id| {
sub_namespace_match(Some(MacroSubNs::from_id(db, id)), expected_macro_subns) sub_namespace_match(Some(MacroSubNs::from_id(db, id)), expected_macro_subns)
}) })
.map_or_else(PerNs::none, |m| PerNs::macros(m, Visibility::Public)); .map_or_else(PerNs::none, |m| PerNs::macros(m, Visibility::Public, None));
let from_scope = self[module].scope.get(name).filter_macro(db, expected_macro_subns); let from_scope = self[module].scope.get(name).filter_macro(db, expected_macro_subns);
let from_builtin = match self.block { let from_builtin = match self.block {
Some(_) => { Some(_) => {
@ -449,18 +456,27 @@ impl DefMap {
let extern_prelude = || { let extern_prelude = || {
if self.block.is_some() { if self.block.is_some() {
// Don't resolve extern prelude in block `DefMap`s. // Don't resolve extern prelude in block `DefMap`s, defer it to the crate def map so
// that blocks can properly shadow them
return PerNs::none(); return PerNs::none();
} }
self.data self.data.extern_prelude.get(name).map_or(PerNs::none(), |&(it, extern_crate)| {
.extern_prelude PerNs::types(
.get(name) it.into(),
.map_or(PerNs::none(), |&it| PerNs::types(it.into(), Visibility::Public)) Visibility::Public,
extern_crate.map(ImportOrExternCrate::ExternCrate),
)
})
}; };
let macro_use_prelude = || { let macro_use_prelude = || {
self.macro_use_prelude self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, _extern_crate)| {
.get(name) PerNs::macros(
.map_or(PerNs::none(), |&it| PerNs::macros(it.into(), Visibility::Public)) it.into(),
Visibility::Public,
// FIXME?
None, // extern_crate.map(ImportOrExternCrate::ExternCrate),
)
})
}; };
let prelude = || self.resolve_in_prelude(db, name); let prelude = || self.resolve_in_prelude(db, name);
@ -488,18 +504,23 @@ impl DefMap {
// Don't resolve extern prelude in block `DefMap`s. // Don't resolve extern prelude in block `DefMap`s.
return PerNs::none(); return PerNs::none();
} }
self.data self.data.extern_prelude.get(name).copied().map_or(
.extern_prelude PerNs::none(),
.get(name) |(it, extern_crate)| {
.copied() PerNs::types(
.map_or(PerNs::none(), |it| PerNs::types(it.into(), Visibility::Public)) it.into(),
Visibility::Public,
extern_crate.map(ImportOrExternCrate::ExternCrate),
)
},
)
}; };
from_crate_root.or_else(from_extern_prelude) from_crate_root.or_else(from_extern_prelude)
} }
fn resolve_in_prelude(&self, db: &dyn DefDatabase, name: &Name) -> PerNs { fn resolve_in_prelude(&self, db: &dyn DefDatabase, name: &Name) -> PerNs {
if let Some(prelude) = self.prelude { if let Some((prelude, _use)) = self.prelude {
let keep; let keep;
let def_map = if prelude.krate == self.krate { let def_map = if prelude.krate == self.krate {
self self

View file
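A reading aid for the expectation churn in the test files that follow — inferred from the diff itself rather than stated in the commit: the def-map expectations appear to now record how a name entered a module, with a bare `t`/`v` for items declared in place, `ti`/`vi` for names that arrived through a `use` import, and `te` for names introduced by `extern crate` (hence `alloc_crate: te` below for `extern crate alloc as alloc_crate;`).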

@ -168,7 +168,7 @@ pub struct Baz;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Foo: t v Foo: ti vi
foo: t foo: t
crate::foo crate::foo
@ -194,8 +194,8 @@ pub enum Quux {};
"#, "#,
expect![[r#" expect![[r#"
crate crate
Baz: t v Baz: ti vi
Quux: t Quux: ti
foo: t foo: t
crate::foo crate::foo
@ -225,11 +225,11 @@ pub struct Baz;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Baz: t v Baz: ti vi
foo: t foo: t
crate::foo crate::foo
Baz: t v Baz: ti vi
bar: t bar: t
crate::foo::bar crate::foo::bar
@ -274,7 +274,7 @@ use self::E::V;
expect![[r#" expect![[r#"
crate crate
E: t E: t
V: t v V: ti vi
"#]], "#]],
); );
} }
@ -307,7 +307,7 @@ pub struct FromLib;
crate::foo crate::foo
Bar: _ Bar: _
FromLib: t v FromLib: ti vi
"#]], "#]],
); );
} }
@ -328,7 +328,7 @@ pub struct Baz;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Baz: t Baz: ti
foo: t foo: t
crate::foo crate::foo
@ -352,7 +352,7 @@ pub struct Baz;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Baz: t v Baz: ti vi
"#]], "#]],
); );
} }
@ -375,13 +375,13 @@ pub struct Arc;
expect![[r#" expect![[r#"
crate crate
alloc: t alloc: t
alloc_crate: t alloc_crate: te
sync: t sync: t
crate::alloc crate::alloc
crate::sync crate::sync
Arc: t v Arc: ti vi
"#]], "#]],
); );
} }
@ -404,13 +404,13 @@ pub struct Arc;
expect![[r#" expect![[r#"
crate crate
alloc: t alloc: t
alloc_crate: t alloc_crate: te
sync: t sync: t
crate::alloc crate::alloc
crate::sync crate::sync
Arc: t v Arc: ti vi
"#]], "#]],
); );
} }
@ -426,7 +426,7 @@ extern crate self as bla;
"#, "#,
expect![[r#" expect![[r#"
crate crate
bla: t bla: te
"#]], "#]],
); );
} }
@ -447,7 +447,7 @@ pub struct Baz;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Baz: t v Baz: ti vi
"#]], "#]],
); );
} }
@ -465,7 +465,7 @@ pub struct Bar;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Bar: t v Bar: ti vi
foo: v foo: v
"#]], "#]],
); );
@ -492,9 +492,9 @@ fn no_std_prelude() {
} }
"#, "#,
expect![[r#" expect![[r#"
crate crate
Rust: t v Rust: ti vi
"#]], "#]],
); );
} }
@ -516,9 +516,9 @@ fn edition_specific_preludes() {
} }
"#, "#,
expect![[r#" expect![[r#"
crate crate
Rust2018: t v Rust2018: ti vi
"#]], "#]],
); );
check( check(
r#" r#"
@ -533,9 +533,9 @@ fn edition_specific_preludes() {
} }
"#, "#,
expect![[r#" expect![[r#"
crate crate
Rust2021: t v Rust2021: ti vi
"#]], "#]],
); );
} }
@ -563,8 +563,8 @@ pub mod prelude {
"#, "#,
expect![[r#" expect![[r#"
crate crate
Bar: t v Bar: ti vi
Foo: t v Foo: ti vi
"#]], "#]],
); );
} }
@ -590,7 +590,7 @@ pub mod prelude {
"#, "#,
expect![[r#" expect![[r#"
crate crate
Bar: t v Bar: ti vi
Baz: _ Baz: _
Foo: _ Foo: _
"#]], "#]],
@ -619,8 +619,8 @@ pub mod prelude {
expect![[r#" expect![[r#"
crate crate
Bar: _ Bar: _
Baz: t v Baz: ti vi
Foo: t v Foo: ti vi
"#]], "#]],
); );
} }
@ -643,7 +643,7 @@ mod b {
"#, "#,
expect![[r#" expect![[r#"
crate crate
T: t v T: ti vi
a: t a: t
b: t b: t
@ -816,8 +816,8 @@ fn bar() {}
expect![[r#" expect![[r#"
crate crate
bar: v bar: v
baz: v baz: vi
foo: t foo: ti
"#]], "#]],
); );
} }
@ -836,7 +836,7 @@ use self::m::S::{self};
"#, "#,
expect![[r#" expect![[r#"
crate crate
S: t S: ti
m: t m: t
crate::m crate::m
@ -860,8 +860,8 @@ pub const settings: () = ();
"#, "#,
expect![[r#" expect![[r#"
crate crate
Settings: t v Settings: ti vi
settings: v settings: vi
"#]], "#]],
) )
} }
@ -890,8 +890,8 @@ pub struct Struct;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Struct: t v Struct: ti vi
dep: t dep: te
"#]], "#]],
); );
} }
@ -917,13 +917,13 @@ use some_module::unknown_func;
crate crate
other_module: t other_module: t
some_module: t some_module: t
unknown_func: v unknown_func: vi
crate::other_module crate::other_module
some_submodule: t some_submodule: t
crate::other_module::some_submodule crate::other_module::some_submodule
unknown_func: v unknown_func: vi
crate::some_module crate::some_module
unknown_func: v unknown_func: v
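
Note on the expectation updates above: the plain namespace markers (`t`, `v`, `m`) gain one-letter suffixes (`ti`, `vi`, `mi`, `te`). The commit does not spell the encoding out; judging from which entries changed, `i` appears to mark a binding reached through a `use` import and `e` one introduced by an `extern crate`. The toy formatter below only encodes that assumption:

#[derive(Clone, Copy)]
enum Via {
    Definition,  // declared in place: plain `t` / `v` / `m`
    Import,      // brought in by a `use`: `ti` / `vi` / `mi`
    ExternCrate, // brought in by an `extern crate`: `te`
}

fn marker(ns: char, via: Via) -> String {
    match via {
        Via::Definition => ns.to_string(),
        Via::Import => format!("{ns}i"),
        Via::ExternCrate => format!("{ns}e"),
    }
}

fn main() {
    // e.g. `Baz: ti vi` and `dep: te` in the expectations above
    assert_eq!(marker('t', Via::Import), "ti");
    assert_eq!(marker('v', Via::Import), "vi");
    assert_eq!(marker('t', Via::ExternCrate), "te");
}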

View file

@ -24,7 +24,7 @@ pub struct Baz;
foo: t foo: t
crate::foo crate::foo
Baz: t v Baz: ti vi
Foo: t v Foo: t v
bar: t bar: t
@ -237,9 +237,9 @@ pub mod baz { pub struct Bar; }
"#, "#,
expect![[r#" expect![[r#"
crate crate
Bar: t v Bar: ti vi
bar: t bar: t
baz: t baz: ti
foo: t foo: t
crate::bar crate::bar
@ -276,9 +276,9 @@ pub mod baz { pub struct Bar; }
"#, "#,
expect![[r#" expect![[r#"
crate crate
Bar: t v Bar: ti vi
bar: t bar: t
baz: t baz: ti
foo: t foo: t
crate::bar crate::bar
@ -323,7 +323,7 @@ mod d {
X: t v X: t v
crate::b crate::b
foo: t foo: ti
crate::c crate::c
foo: t foo: t
@ -332,8 +332,8 @@ mod d {
Y: t v Y: t v
crate::d crate::d
Y: t v Y: ti vi
foo: t foo: ti
"#]], "#]],
); );
} }
@ -355,7 +355,7 @@ use event::Event;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Event: t Event: ti
event: t event: t
crate::event crate::event

View file

@ -212,7 +212,7 @@ pub type Ty = ();
} }
for (_, res) in module_data.scope.resolutions() { for (_, res) in module_data.scope.resolutions() {
match res.values.or(res.types).unwrap().0 { match res.values.map(|(a, _, _)| a).or(res.types.map(|(a, _, _)| a)).unwrap() {
ModuleDefId::FunctionId(f) => _ = db.function_data(f), ModuleDefId::FunctionId(f) => _ = db.function_data(f),
ModuleDefId::AdtId(adt) => match adt { ModuleDefId::AdtId(adt) => match adt {
AdtId::StructId(it) => _ = db.struct_data(it), AdtId::StructId(it) => _ = db.struct_data(it),

View file

@ -203,8 +203,8 @@ macro_rules! bar {
expect![[r#" expect![[r#"
crate crate
Foo: t Foo: t
bar: m bar: mi
foo: m foo: mi
"#]], "#]],
); );
} }
@ -251,7 +251,7 @@ mod priv_mod {
Bar: t v Bar: t v
Foo: t v Foo: t v
bar: t bar: t
foo: t foo: te
crate::bar crate::bar
Baz: t v Baz: t v
@ -318,9 +318,9 @@ macro_rules! baz3 { () => { struct OkBaz3; } }
OkBaz1: t v OkBaz1: t v
OkBaz2: t v OkBaz2: t v
OkBaz3: t v OkBaz3: t v
all: t all: te
empty: t empty: te
multiple: t multiple: te
"#]], "#]],
); );
} }
@ -551,8 +551,8 @@ fn baz() {}
"#, "#,
expect![[r#" expect![[r#"
crate crate
bar: t m bar: ti mi
baz: t v m baz: ti v mi
foo: t m foo: t m
"#]], "#]],
); );
@ -583,7 +583,7 @@ mod m {
crate crate
Alias: t v Alias: t v
Direct: t v Direct: t v
foo: t foo: te
"#]], "#]],
); );
} }
@ -628,9 +628,9 @@ mod m {
m: t m: t
crate::m crate::m
alias1: m alias1: mi
alias2: m alias2: mi
alias3: m alias3: mi
not_found: _ not_found: _
"#]], "#]],
); );
@ -682,11 +682,11 @@ pub struct Baz;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Bar: t v Bar: ti vi
Baz: t v Baz: ti vi
Foo: t v Foo: t v
FooSelf: t v FooSelf: ti vi
foo: t foo: te
m: t m: t
crate::m crate::m
@ -725,7 +725,7 @@ pub struct bar;
"#, "#,
expect![[r#" expect![[r#"
crate crate
bar: t v bar: ti vi
"#]], "#]],
); );
} }
@ -1340,7 +1340,7 @@ pub mod prelude {
crate crate
Ok: t v Ok: t v
bar: m bar: m
dep: t dep: te
foo: m foo: m
ok: v ok: v
"#]], "#]],
@ -1370,13 +1370,13 @@ macro_rules! mk_foo {
} }
"#, "#,
expect![[r#" expect![[r#"
crate crate
a: t a: t
lib: t lib: te
crate::a crate::a
Ok: t v Ok: t v
"#]], "#]],
); );
} }
@ -1427,8 +1427,8 @@ pub mod prelude {
expect![[r#" expect![[r#"
crate crate
Ok: t v Ok: t v
bar: m bar: mi
foo: m foo: mi
ok: v ok: v
"#]], "#]],
); );

View file

@ -80,18 +80,18 @@ pub trait Iterator;
prelude: t prelude: t
crate::iter crate::iter
Iterator: t Iterator: ti
traits: t traits: t
crate::iter::traits crate::iter::traits
Iterator: t Iterator: ti
iterator: t iterator: t
crate::iter::traits::iterator crate::iter::traits::iterator
Iterator: t Iterator: t
crate::prelude crate::prelude
Iterator: t Iterator: ti
"#]], "#]],
); );
} }
@ -109,7 +109,7 @@ pub struct Bar;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Bar: t v Bar: ti vi
foo: t foo: t
crate::foo crate::foo
@ -139,7 +139,7 @@ pub struct Baz;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Bar: t v Bar: ti vi
r#async: t r#async: t
crate::r#async crate::r#async
@ -176,8 +176,8 @@ pub struct Bar;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Bar: t v Bar: ti vi
Foo: t v Foo: ti vi
r#async: t r#async: t
crate::r#async crate::r#async
@ -207,7 +207,7 @@ pub struct Bar;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Bar: t v Bar: ti vi
foo: t foo: t
crate::foo crate::foo
@ -236,7 +236,7 @@ pub struct Baz;
foo: t foo: t
crate::foo crate::foo
Baz: t v Baz: ti vi
bar: t bar: t
crate::foo::bar crate::foo::bar
@ -265,7 +265,7 @@ pub struct Baz;
foo: t foo: t
crate::foo crate::foo
Baz: t v Baz: ti vi
bar: t bar: t
crate::foo::bar crate::foo::bar
@ -292,7 +292,7 @@ use super::Baz;
foo: t foo: t
crate::foo crate::foo
Baz: t v Baz: ti vi
"#]], "#]],
); );
} }
@ -626,7 +626,7 @@ pub struct Baz;
"#, "#,
expect![[r#" expect![[r#"
crate crate
Baz: t v Baz: ti vi
foo: t foo: t
crate::foo crate::foo
@ -660,7 +660,7 @@ pub struct Baz;
foo: t foo: t
crate::foo crate::foo
Baz: t v Baz: ti vi
bar: t bar: t
crate::foo::bar crate::foo::bar
@ -694,7 +694,7 @@ pub struct Baz;
foo: t foo: t
crate::foo crate::foo
Baz: t v Baz: ti vi
bar: t bar: t
crate::foo::bar crate::foo::bar
@ -728,7 +728,7 @@ pub struct Baz;
foo: t foo: t
crate::foo crate::foo
Baz: t v Baz: ti vi
bar: t bar: t
crate::foo::bar crate::foo::bar
@ -868,7 +868,7 @@ pub mod hash { pub trait Hash {} }
"#, "#,
expect![[r#" expect![[r#"
crate crate
Hash: t Hash: ti
core: t core: t
crate::core crate::core

View file

@ -14,10 +14,10 @@ pub use i32 as int;
expect![[r#" expect![[r#"
crate crate
foo: t foo: t
int: t int: ti
crate::foo crate::foo
int: t int: ti
"#]], "#]],
); );
} }

View file

@ -3,13 +3,24 @@
//! //!
//! `PerNs` (per namespace) captures this. //! `PerNs` (per namespace) captures this.
use crate::{item_scope::ItemInNs, visibility::Visibility, MacroId, ModuleDefId}; use crate::{
item_scope::{ImportId, ImportOrExternCrate, ItemInNs},
visibility::Visibility,
MacroId, ModuleDefId,
};
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
pub enum Namespace {
Types,
Values,
Macros,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct PerNs { pub struct PerNs {
pub types: Option<(ModuleDefId, Visibility)>, pub types: Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)>,
pub values: Option<(ModuleDefId, Visibility)>, pub values: Option<(ModuleDefId, Visibility, Option<ImportId>)>,
pub macros: Option<(MacroId, Visibility)>, pub macros: Option<(MacroId, Visibility, Option<ImportId>)>,
} }
impl Default for PerNs { impl Default for PerNs {
@ -23,20 +34,29 @@ impl PerNs {
PerNs { types: None, values: None, macros: None } PerNs { types: None, values: None, macros: None }
} }
pub fn values(t: ModuleDefId, v: Visibility) -> PerNs { pub fn values(t: ModuleDefId, v: Visibility, i: Option<ImportId>) -> PerNs {
PerNs { types: None, values: Some((t, v)), macros: None } PerNs { types: None, values: Some((t, v, i)), macros: None }
} }
pub fn types(t: ModuleDefId, v: Visibility) -> PerNs { pub fn types(t: ModuleDefId, v: Visibility, i: Option<ImportOrExternCrate>) -> PerNs {
PerNs { types: Some((t, v)), values: None, macros: None } PerNs { types: Some((t, v, i)), values: None, macros: None }
} }
pub fn both(types: ModuleDefId, values: ModuleDefId, v: Visibility) -> PerNs { pub fn both(
PerNs { types: Some((types, v)), values: Some((values, v)), macros: None } types: ModuleDefId,
values: ModuleDefId,
v: Visibility,
i: Option<ImportOrExternCrate>,
) -> PerNs {
PerNs {
types: Some((types, v, i)),
values: Some((values, v, i.and_then(ImportOrExternCrate::into_import))),
macros: None,
}
} }
pub fn macros(macro_: MacroId, v: Visibility) -> PerNs { pub fn macros(macro_: MacroId, v: Visibility, i: Option<ImportId>) -> PerNs {
PerNs { types: None, values: None, macros: Some((macro_, v)) } PerNs { types: None, values: None, macros: Some((macro_, v, i)) }
} }
pub fn is_none(&self) -> bool { pub fn is_none(&self) -> bool {
@ -51,7 +71,7 @@ impl PerNs {
self.types.map(|it| it.0) self.types.map(|it| it.0)
} }
pub fn take_types_vis(self) -> Option<(ModuleDefId, Visibility)> { pub fn take_types_full(self) -> Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)> {
self.types self.types
} }
@ -59,24 +79,32 @@ impl PerNs {
self.values.map(|it| it.0) self.values.map(|it| it.0)
} }
pub fn take_values_import(self) -> Option<(ModuleDefId, Option<ImportId>)> {
self.values.map(|it| (it.0, it.2))
}
pub fn take_macros(self) -> Option<MacroId> { pub fn take_macros(self) -> Option<MacroId> {
self.macros.map(|it| it.0) self.macros.map(|it| it.0)
} }
pub fn take_macros_import(self) -> Option<(MacroId, Option<ImportId>)> {
self.macros.map(|it| (it.0, it.2))
}
pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs { pub fn filter_visibility(self, mut f: impl FnMut(Visibility) -> bool) -> PerNs {
let _p = profile::span("PerNs::filter_visibility"); let _p = profile::span("PerNs::filter_visibility");
PerNs { PerNs {
types: self.types.filter(|(_, v)| f(*v)), types: self.types.filter(|&(_, v, _)| f(v)),
values: self.values.filter(|(_, v)| f(*v)), values: self.values.filter(|&(_, v, _)| f(v)),
macros: self.macros.filter(|(_, v)| f(*v)), macros: self.macros.filter(|&(_, v, _)| f(v)),
} }
} }
pub fn with_visibility(self, vis: Visibility) -> PerNs { pub fn with_visibility(self, vis: Visibility) -> PerNs {
PerNs { PerNs {
types: self.types.map(|(it, _)| (it, vis)), types: self.types.map(|(it, _, c)| (it, vis, c)),
values: self.values.map(|(it, _)| (it, vis)), values: self.values.map(|(it, _, c)| (it, vis, c)),
macros: self.macros.map(|(it, _)| (it, vis)), macros: self.macros.map(|(it, _, import)| (it, vis, import)),
} }
} }
@ -96,12 +124,20 @@ impl PerNs {
} }
} }
pub fn iter_items(self) -> impl Iterator<Item = ItemInNs> { pub fn iter_items(self) -> impl Iterator<Item = (ItemInNs, Option<ImportOrExternCrate>)> {
let _p = profile::span("PerNs::iter_items"); let _p = profile::span("PerNs::iter_items");
self.types self.types
.map(|it| ItemInNs::Types(it.0)) .map(|it| (ItemInNs::Types(it.0), it.2))
.into_iter() .into_iter()
.chain(self.values.map(|it| ItemInNs::Values(it.0)).into_iter()) .chain(
.chain(self.macros.map(|it| ItemInNs::Macros(it.0)).into_iter()) self.values
.map(|it| (ItemInNs::Values(it.0), it.2.map(ImportOrExternCrate::Import)))
.into_iter(),
)
.chain(
self.macros
.map(|it| (ItemInNs::Macros(it.0), it.2.map(ImportOrExternCrate::Import)))
.into_iter(),
)
} }
} }
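
Note on the `PerNs` changes above: every namespace entry now carries the import (or extern crate) it was introduced through, not just the definition and its visibility. A condensed, self-contained sketch of the new shape, using simplified stand-in types rather than rust-analyzer's real definitions:

#[derive(Clone, Copy, Debug, PartialEq)]
struct ImportId(u32);

#[derive(Clone, Copy, Debug, PartialEq)]
enum ImportOrExternCrate {
    Import(ImportId),
    ExternCrate(u32),
}

impl ImportOrExternCrate {
    fn into_import(self) -> Option<ImportId> {
        match self {
            ImportOrExternCrate::Import(it) => Some(it),
            ImportOrExternCrate::ExternCrate(_) => None,
        }
    }
}

#[derive(Clone, Copy, Debug)]
enum Visibility { Public, Crate }

// Each namespace remembers *how* the item got into scope, not only what it is.
#[derive(Default, Debug)]
struct PerNs {
    types: Option<(u32, Visibility, Option<ImportOrExternCrate>)>,
    values: Option<(u32, Visibility, Option<ImportId>)>,
    macros: Option<(u32, Visibility, Option<ImportId>)>,
}

impl PerNs {
    fn both(types: u32, values: u32, v: Visibility, i: Option<ImportOrExternCrate>) -> PerNs {
        PerNs {
            types: Some((types, v, i)),
            // Only a plain `use` can land in the value namespace, so the
            // extern-crate case is dropped here, mirroring `PerNs::both` above.
            values: Some((values, v, i.and_then(ImportOrExternCrate::into_import))),
            macros: None,
        }
    }
}

fn main() {
    let ns = PerNs::both(1, 2, Visibility::Public, Some(ImportOrExternCrate::ExternCrate(0)));
    assert_eq!(ns.values.unwrap().2, None);
}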

View file

@ -12,20 +12,21 @@ use triomphe::Arc;
use crate::{ use crate::{
body::scope::{ExprScopes, ScopeId}, body::scope::{ExprScopes, ScopeId},
builtin_type::BuiltinType, builtin_type::BuiltinType,
data::ExternCrateDeclData,
db::DefDatabase, db::DefDatabase,
generics::{GenericParams, TypeOrConstParamData}, generics::{GenericParams, TypeOrConstParamData},
hir::{BindingId, ExprId, LabelId}, hir::{BindingId, ExprId, LabelId},
item_scope::{BuiltinShadowMode, BUILTIN_SCOPE}, item_scope::{BuiltinShadowMode, ImportId, ImportOrExternCrate, BUILTIN_SCOPE},
lang_item::LangItemTarget, lang_item::LangItemTarget,
nameres::{DefMap, MacroSubNs}, nameres::{DefMap, MacroSubNs},
path::{ModPath, Path, PathKind}, path::{ModPath, Path, PathKind},
per_ns::PerNs, per_ns::PerNs,
visibility::{RawVisibility, Visibility}, visibility::{RawVisibility, Visibility},
AdtId, AssocItemId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId,
EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId,
HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId, ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId,
MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId,
}; };
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -100,8 +101,8 @@ pub enum TypeNs {
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ResolveValueResult { pub enum ResolveValueResult {
ValueNs(ValueNs), ValueNs(ValueNs, Option<ImportId>),
Partial(TypeNs, usize), Partial(TypeNs, usize, Option<ImportOrExternCrate>),
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@ -148,39 +149,11 @@ impl Resolver {
self.resolve_module_path(db, path, BuiltinShadowMode::Module) self.resolve_module_path(db, path, BuiltinShadowMode::Module)
} }
// FIXME: This shouldn't exist
pub fn resolve_module_path_in_trait_assoc_items(
&self,
db: &dyn DefDatabase,
path: &ModPath,
) -> Option<PerNs> {
let (item_map, module) = self.item_scope();
let (module_res, idx) =
item_map.resolve_path(db, module, path, BuiltinShadowMode::Module, None);
match module_res.take_types()? {
ModuleDefId::TraitId(it) => {
let idx = idx?;
let unresolved = &path.segments()[idx..];
let assoc = match unresolved {
[it] => it,
_ => return None,
};
let &(_, assoc) = db.trait_data(it).items.iter().find(|(n, _)| n == assoc)?;
Some(match assoc {
AssocItemId::FunctionId(it) => PerNs::values(it.into(), Visibility::Public),
AssocItemId::ConstId(it) => PerNs::values(it.into(), Visibility::Public),
AssocItemId::TypeAliasId(it) => PerNs::types(it.into(), Visibility::Public),
})
}
_ => None,
}
}
pub fn resolve_path_in_type_ns( pub fn resolve_path_in_type_ns(
&self, &self,
db: &dyn DefDatabase, db: &dyn DefDatabase,
path: &Path, path: &Path,
) -> Option<(TypeNs, Option<usize>)> { ) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>)> {
let path = match path { let path = match path {
Path::Normal { mod_path, .. } => mod_path, Path::Normal { mod_path, .. } => mod_path,
Path::LangItem(l) => { Path::LangItem(l) => {
@ -197,6 +170,7 @@ impl Resolver {
| LangItemTarget::Static(_) => return None, | LangItemTarget::Static(_) => return None,
}, },
None, None,
None,
)) ))
} }
}; };
@ -213,17 +187,17 @@ impl Resolver {
Scope::ExprScope(_) => continue, Scope::ExprScope(_) => continue,
Scope::GenericParams { params, def } => { Scope::GenericParams { params, def } => {
if let Some(id) = params.find_type_by_name(first_name, *def) { if let Some(id) = params.find_type_by_name(first_name, *def) {
return Some((TypeNs::GenericParam(id), remaining_idx())); return Some((TypeNs::GenericParam(id), remaining_idx(), None));
} }
} }
&Scope::ImplDefScope(impl_) => { &Scope::ImplDefScope(impl_) => {
if first_name == &name![Self] { if first_name == &name![Self] {
return Some((TypeNs::SelfType(impl_), remaining_idx())); return Some((TypeNs::SelfType(impl_), remaining_idx(), None));
} }
} }
&Scope::AdtScope(adt) => { &Scope::AdtScope(adt) => {
if first_name == &name![Self] { if first_name == &name![Self] {
return Some((TypeNs::AdtSelfType(adt), remaining_idx())); return Some((TypeNs::AdtSelfType(adt), remaining_idx(), None));
} }
} }
Scope::BlockScope(m) => { Scope::BlockScope(m) => {
@ -236,12 +210,24 @@ impl Resolver {
self.module_scope.resolve_path_in_type_ns(db, path) self.module_scope.resolve_path_in_type_ns(db, path)
} }
pub fn resolve_path_in_type_ns_fully_with_imports(
&self,
db: &dyn DefDatabase,
path: &Path,
) -> Option<(TypeNs, Option<ImportOrExternCrate>)> {
let (res, unresolved, imp) = self.resolve_path_in_type_ns(db, path)?;
if unresolved.is_some() {
return None;
}
Some((res, imp))
}
pub fn resolve_path_in_type_ns_fully( pub fn resolve_path_in_type_ns_fully(
&self, &self,
db: &dyn DefDatabase, db: &dyn DefDatabase,
path: &Path, path: &Path,
) -> Option<TypeNs> { ) -> Option<TypeNs> {
let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?; let (res, unresolved, _) = self.resolve_path_in_type_ns(db, path)?;
if unresolved.is_some() { if unresolved.is_some() {
return None; return None;
} }
@ -263,7 +249,6 @@ impl Resolver {
RawVisibility::Public => Some(Visibility::Public), RawVisibility::Public => Some(Visibility::Public),
} }
} }
pub fn resolve_path_in_value_ns( pub fn resolve_path_in_value_ns(
&self, &self,
db: &dyn DefDatabase, db: &dyn DefDatabase,
@ -272,17 +257,20 @@ impl Resolver {
let path = match path { let path = match path {
Path::Normal { mod_path, .. } => mod_path, Path::Normal { mod_path, .. } => mod_path,
Path::LangItem(l) => { Path::LangItem(l) => {
return Some(ResolveValueResult::ValueNs(match *l { return Some(ResolveValueResult::ValueNs(
LangItemTarget::Function(it) => ValueNs::FunctionId(it), match *l {
LangItemTarget::Static(it) => ValueNs::StaticId(it), LangItemTarget::Function(it) => ValueNs::FunctionId(it),
LangItemTarget::Struct(it) => ValueNs::StructId(it), LangItemTarget::Static(it) => ValueNs::StaticId(it),
LangItemTarget::EnumVariant(it) => ValueNs::EnumVariantId(it), LangItemTarget::Struct(it) => ValueNs::StructId(it),
LangItemTarget::Union(_) LangItemTarget::EnumVariant(it) => ValueNs::EnumVariantId(it),
| LangItemTarget::ImplDef(_) LangItemTarget::Union(_)
| LangItemTarget::TypeAlias(_) | LangItemTarget::ImplDef(_)
| LangItemTarget::Trait(_) | LangItemTarget::TypeAlias(_)
| LangItemTarget::EnumId(_) => return None, | LangItemTarget::Trait(_)
})) | LangItemTarget::EnumId(_) => return None,
},
None,
))
} }
}; };
let n_segments = path.segments().len(); let n_segments = path.segments().len();
@ -304,20 +292,24 @@ impl Resolver {
.find(|entry| entry.name() == first_name); .find(|entry| entry.name() == first_name);
if let Some(e) = entry { if let Some(e) = entry {
return Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding( return Some(ResolveValueResult::ValueNs(
e.binding(), ValueNs::LocalBinding(e.binding()),
))); None,
));
} }
} }
Scope::GenericParams { params, def } => { Scope::GenericParams { params, def } => {
if let Some(id) = params.find_const_by_name(first_name, *def) { if let Some(id) = params.find_const_by_name(first_name, *def) {
let val = ValueNs::GenericParam(id); let val = ValueNs::GenericParam(id);
return Some(ResolveValueResult::ValueNs(val)); return Some(ResolveValueResult::ValueNs(val, None));
} }
} }
&Scope::ImplDefScope(impl_) => { &Scope::ImplDefScope(impl_) => {
if first_name == &name![Self] { if first_name == &name![Self] {
return Some(ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_))); return Some(ResolveValueResult::ValueNs(
ValueNs::ImplSelf(impl_),
None,
));
} }
} }
// bare `Self` doesn't work in the value namespace in a struct/enum definition // bare `Self` doesn't work in the value namespace in a struct/enum definition
@ -336,18 +328,22 @@ impl Resolver {
Scope::GenericParams { params, def } => { Scope::GenericParams { params, def } => {
if let Some(id) = params.find_type_by_name(first_name, *def) { if let Some(id) = params.find_type_by_name(first_name, *def) {
let ty = TypeNs::GenericParam(id); let ty = TypeNs::GenericParam(id);
return Some(ResolveValueResult::Partial(ty, 1)); return Some(ResolveValueResult::Partial(ty, 1, None));
} }
} }
&Scope::ImplDefScope(impl_) => { &Scope::ImplDefScope(impl_) => {
if first_name == &name![Self] { if first_name == &name![Self] {
return Some(ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1)); return Some(ResolveValueResult::Partial(
TypeNs::SelfType(impl_),
1,
None,
));
} }
} }
Scope::AdtScope(adt) => { Scope::AdtScope(adt) => {
if first_name == &name![Self] { if first_name == &name![Self] {
let ty = TypeNs::AdtSelfType(*adt); let ty = TypeNs::AdtSelfType(*adt);
return Some(ResolveValueResult::Partial(ty, 1)); return Some(ResolveValueResult::Partial(ty, 1, None));
} }
} }
Scope::BlockScope(m) => { Scope::BlockScope(m) => {
@ -368,7 +364,7 @@ impl Resolver {
// `use core::u16;`. // `use core::u16;`.
if path.kind == PathKind::Plain && n_segments > 1 { if path.kind == PathKind::Plain && n_segments > 1 {
if let Some(builtin) = BuiltinType::by_name(first_name) { if let Some(builtin) = BuiltinType::by_name(first_name) {
return Some(ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1)); return Some(ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1, None));
} }
} }
@ -381,7 +377,7 @@ impl Resolver {
path: &Path, path: &Path,
) -> Option<ValueNs> { ) -> Option<ValueNs> {
match self.resolve_path_in_value_ns(db, path)? { match self.resolve_path_in_value_ns(db, path)? {
ResolveValueResult::ValueNs(it) => Some(it), ResolveValueResult::ValueNs(it, _) => Some(it),
ResolveValueResult::Partial(..) => None, ResolveValueResult::Partial(..) => None,
} }
} }
@ -391,12 +387,12 @@ impl Resolver {
db: &dyn DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &ModPath,
expected_macro_kind: Option<MacroSubNs>, expected_macro_kind: Option<MacroSubNs>,
) -> Option<MacroId> { ) -> Option<(MacroId, Option<ImportId>)> {
let (item_map, module) = self.item_scope(); let (item_map, module) = self.item_scope();
item_map item_map
.resolve_path(db, module, path, BuiltinShadowMode::Other, expected_macro_kind) .resolve_path(db, module, path, BuiltinShadowMode::Other, expected_macro_kind)
.0 .0
.take_macros() .take_macros_import()
} }
/// Returns a set of names available in the current scope. /// Returns a set of names available in the current scope.
@ -456,21 +452,22 @@ impl Resolver {
def_map[module_id].scope.entries().for_each(|(name, def)| { def_map[module_id].scope.entries().for_each(|(name, def)| {
res.add_per_ns(name, def); res.add_per_ns(name, def);
}); });
def_map[module_id].scope.legacy_macros().for_each(|(name, macs)| { def_map[module_id].scope.legacy_macros().for_each(|(name, macs)| {
macs.iter().for_each(|&mac| { macs.iter().for_each(|&mac| {
res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac))); res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)));
}) })
}); });
def_map.macro_use_prelude().for_each(|(name, def)| { def_map.macro_use_prelude().for_each(|(name, (def, _extern_crate))| {
res.add(name, ScopeDef::ModuleDef(def.into())); res.add(name, ScopeDef::ModuleDef(def.into()));
}); });
def_map.extern_prelude().for_each(|(name, def)| { def_map.extern_prelude().for_each(|(name, (def, _extern_crate))| {
res.add(name, ScopeDef::ModuleDef(ModuleDefId::ModuleId(def))); res.add(name, ScopeDef::ModuleDef(ModuleDefId::ModuleId(def.into())));
}); });
BUILTIN_SCOPE.iter().for_each(|(name, &def)| { BUILTIN_SCOPE.iter().for_each(|(name, &def)| {
res.add_per_ns(name, def); res.add_per_ns(name, def);
}); });
if let Some(prelude) = def_map.prelude() { if let Some((prelude, _use)) = def_map.prelude() {
let prelude_def_map = prelude.def_map(db); let prelude_def_map = prelude.def_map(db);
for (name, def) in prelude_def_map[prelude.local_id].scope.entries() { for (name, def) in prelude_def_map[prelude.local_id].scope.entries() {
res.add_per_ns(name, def) res.add_per_ns(name, def)
@ -479,6 +476,23 @@ impl Resolver {
res.map res.map
} }
pub fn extern_crate_decls_in_scope<'a>(
&'a self,
db: &'a dyn DefDatabase,
) -> impl Iterator<Item = Name> + 'a {
self.module_scope.def_map[self.module_scope.module_id]
.scope
.extern_crate_decls()
.map(|id| ExternCrateDeclData::extern_crate_decl_data_query(db, id).name.clone())
}
pub fn extern_crates_in_scope<'a>(&'a self) -> impl Iterator<Item = (Name, ModuleId)> + 'a {
self.module_scope
.def_map
.extern_prelude()
.map(|(name, module_id)| (name.clone(), module_id.0.into()))
}
pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet<TraitId> { pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet<TraitId> {
// FIXME(trait_alias): Trait alias brings aliased traits in scope! Note that supertraits of // FIXME(trait_alias): Trait alias brings aliased traits in scope! Note that supertraits of
// aliased traits are NOT brought in scope (unless also aliased). // aliased traits are NOT brought in scope (unless also aliased).
@ -501,7 +515,7 @@ impl Resolver {
} }
// Fill in the prelude traits // Fill in the prelude traits
if let Some(prelude) = self.module_scope.def_map.prelude() { if let Some((prelude, _use)) = self.module_scope.def_map.prelude() {
let prelude_def_map = prelude.def_map(db); let prelude_def_map = prelude.def_map(db);
traits.extend(prelude_def_map[prelude.local_id].scope.traits()); traits.extend(prelude_def_map[prelude.local_id].scope.traits());
} }
@ -804,11 +818,12 @@ impl ModuleItemMap {
self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other); self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
match idx { match idx {
None => { None => {
let value = to_value_ns(module_def)?; let (value, import) = to_value_ns(module_def)?;
Some(ResolveValueResult::ValueNs(value)) Some(ResolveValueResult::ValueNs(value, import))
} }
Some(idx) => { Some(idx) => {
let ty = match module_def.take_types()? { let (def, _, import) = module_def.take_types_full()?;
let ty = match def {
ModuleDefId::AdtId(it) => TypeNs::AdtId(it), ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
ModuleDefId::TraitId(it) => TypeNs::TraitId(it), ModuleDefId::TraitId(it) => TypeNs::TraitId(it),
ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it), ModuleDefId::TraitAliasId(it) => TypeNs::TraitAliasId(it),
@ -822,7 +837,7 @@ impl ModuleItemMap {
| ModuleDefId::MacroId(_) | ModuleDefId::MacroId(_)
| ModuleDefId::StaticId(_) => return None, | ModuleDefId::StaticId(_) => return None,
}; };
Some(ResolveValueResult::Partial(ty, idx)) Some(ResolveValueResult::Partial(ty, idx, import))
} }
} }
} }
@ -831,16 +846,17 @@ impl ModuleItemMap {
&self, &self,
db: &dyn DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &ModPath,
) -> Option<(TypeNs, Option<usize>)> { ) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>)> {
let (module_def, idx) = let (module_def, idx) =
self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other); self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
let res = to_type_ns(module_def)?; let (res, import) = to_type_ns(module_def)?;
Some((res, idx)) Some((res, idx, import))
} }
} }
fn to_value_ns(per_ns: PerNs) -> Option<ValueNs> { fn to_value_ns(per_ns: PerNs) -> Option<(ValueNs, Option<ImportId>)> {
let res = match per_ns.take_values()? { let (def, import) = per_ns.take_values_import()?;
let res = match def {
ModuleDefId::FunctionId(it) => ValueNs::FunctionId(it), ModuleDefId::FunctionId(it) => ValueNs::FunctionId(it),
ModuleDefId::AdtId(AdtId::StructId(it)) => ValueNs::StructId(it), ModuleDefId::AdtId(AdtId::StructId(it)) => ValueNs::StructId(it),
ModuleDefId::EnumVariantId(it) => ValueNs::EnumVariantId(it), ModuleDefId::EnumVariantId(it) => ValueNs::EnumVariantId(it),
@ -855,11 +871,12 @@ fn to_value_ns(per_ns: PerNs) -> Option<ValueNs> {
| ModuleDefId::MacroId(_) | ModuleDefId::MacroId(_)
| ModuleDefId::ModuleId(_) => return None, | ModuleDefId::ModuleId(_) => return None,
}; };
Some(res) Some((res, import))
} }
fn to_type_ns(per_ns: PerNs) -> Option<TypeNs> { fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option<ImportOrExternCrate>)> {
let res = match per_ns.take_types()? { let (def, _, import) = per_ns.take_types_full()?;
let res = match def {
ModuleDefId::AdtId(it) => TypeNs::AdtId(it), ModuleDefId::AdtId(it) => TypeNs::AdtId(it),
ModuleDefId::EnumVariantId(it) => TypeNs::EnumVariantId(it), ModuleDefId::EnumVariantId(it) => TypeNs::EnumVariantId(it),
@ -875,7 +892,7 @@ fn to_type_ns(per_ns: PerNs) -> Option<TypeNs> {
| ModuleDefId::StaticId(_) | ModuleDefId::StaticId(_)
| ModuleDefId::ModuleId(_) => return None, | ModuleDefId::ModuleId(_) => return None,
}; };
Some(res) Some((res, import))
} }
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>; type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
@ -892,13 +909,13 @@ impl ScopeNames {
} }
} }
fn add_per_ns(&mut self, name: &Name, def: PerNs) { fn add_per_ns(&mut self, name: &Name, def: PerNs) {
if let &Some((ty, _)) = &def.types { if let &Some((ty, _, _)) = &def.types {
self.add(name, ScopeDef::ModuleDef(ty)) self.add(name, ScopeDef::ModuleDef(ty))
} }
if let &Some((def, _)) = &def.values { if let &Some((def, _, _)) = &def.values {
self.add(name, ScopeDef::ModuleDef(def)) self.add(name, ScopeDef::ModuleDef(def))
} }
if let &Some((mac, _)) = &def.macros { if let &Some((mac, _, _)) = &def.macros {
self.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac))) self.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac)))
} }
if def.is_none() { if def.is_none() {

View file

@ -5,8 +5,8 @@ use la_arena::ArenaMap;
use syntax::ast; use syntax::ast;
use crate::{ use crate::{
db::DefDatabase, item_tree::ItemTreeNode, AssocItemLoc, ItemLoc, Macro2Loc, MacroRulesLoc, db::DefDatabase, item_tree::ItemTreeNode, AssocItemLoc, ItemLoc, Lookup, Macro2Loc,
ProcMacroLoc, MacroRulesLoc, ProcMacroLoc, UseId,
}; };
pub trait HasSource { pub trait HasSource {
@ -83,3 +83,18 @@ pub trait HasChildSource<ChildId> {
type Value; type Value;
fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<ChildId, Self::Value>>; fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<ChildId, Self::Value>>;
} }
impl HasChildSource<la_arena::Idx<ast::UseTree>> for UseId {
type Value = ast::UseTree;
fn child_source(
&self,
db: &dyn DefDatabase,
) -> InFile<ArenaMap<la_arena::Idx<ast::UseTree>, Self::Value>> {
let loc = &self.lookup(db);
let use_ = &loc.id.item_tree(db)[loc.id.value];
InFile::new(
loc.id.file_id(),
use_.use_tree_source_map(db, loc.id.file_id()).into_iter().collect(),
)
}
}

View file

@ -342,14 +342,7 @@ fn inner_attributes(
ast::Impl(it) => it.assoc_item_list()?.syntax().clone(), ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
ast::Module(it) => it.item_list()?.syntax().clone(), ast::Module(it) => it.item_list()?.syntax().clone(),
ast::BlockExpr(it) => { ast::BlockExpr(it) => {
use syntax::SyntaxKind::{BLOCK_EXPR , EXPR_STMT}; if !it.may_carry_attributes() {
// Block expressions accept outer and inner attributes, but only when they are the outer
// expression of an expression statement or the final expression of another block expression.
let may_carry_attributes = matches!(
it.syntax().parent().map(|it| it.kind()),
Some(BLOCK_EXPR | EXPR_STMT)
);
if !may_carry_attributes {
return None return None
} }
syntax.clone() syntax.clone()
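
Note on the hunk above: the inline check moves into a new `ast::BlockExpr::may_carry_attributes` helper. Reconstructed from the code it replaces (and assuming the in-tree `syntax` crate), the helper presumably amounts to this sketch:

use syntax::{ast, AstNode, SyntaxKind};

// A block expression can carry attributes only when it is the outer expression
// of an expression statement or the tail expression of another block expression.
fn may_carry_attributes(block: &ast::BlockExpr) -> bool {
    matches!(
        block.syntax().parent().map(|it| it.kind()),
        Some(SyntaxKind::BLOCK_EXPR | SyntaxKind::EXPR_STMT)
    )
}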

View file

@ -37,7 +37,7 @@ use either::Either;
use syntax::{ use syntax::{
algo::{self, skip_trivia_token}, algo::{self, skip_trivia_token},
ast::{self, AstNode, HasDocComments}, ast::{self, AstNode, HasDocComments},
AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
}; };
use crate::{ use crate::{
@ -544,7 +544,7 @@ impl MacroCallKind {
}; };
let range = match kind { let range = match kind {
MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(), MacroCallKind::FnLike { ast_id, .. } => ast_id.to_ptr(db).text_range(),
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => { MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive // FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr` // FIXME: handle `cfg_attr`
@ -642,6 +642,8 @@ impl ExpansionInfo {
db: &dyn db::ExpandDatabase, db: &dyn db::ExpandDatabase,
item: Option<ast::Item>, item: Option<ast::Item>,
token: InFile<&SyntaxToken>, token: InFile<&SyntaxToken>,
// FIXME: use this for range mapping, so that we can resolve inline format args
_relative_token_offset: Option<TextSize>,
) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> { ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
assert_eq!(token.file_id, self.arg.file_id); assert_eq!(token.file_id, self.arg.file_id);
let token_id_in_attr_input = if let Some(item) = item { let token_id_in_attr_input = if let Some(item) = item {
@ -840,9 +842,6 @@ impl<N: AstIdNode> AstId<N> {
pub type ErasedAstId = InFile<ErasedFileAstId>; pub type ErasedAstId = InFile<ErasedFileAstId>;
impl ErasedAstId { impl ErasedAstId {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
}
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr { pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
db.ast_id_map(self.file_id).get_raw(self.value) db.ast_id_map(self.file_id).get_raw(self.value)
} }
@ -1054,16 +1053,6 @@ impl InFile<SyntaxToken> {
} }
} }
} }
pub fn ancestors_with_macros(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
self.value.parent().into_iter().flat_map({
let file_id = self.file_id;
move |parent| InFile::new(file_id, &parent).ancestors_with_macros(db)
})
}
} }
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]

View file

@ -17,7 +17,8 @@ use smallvec::SmallVec;
use crate::{ use crate::{
consteval::unknown_const_as_generic, db::HirDatabase, infer::unify::InferenceTable, primitive, consteval::unknown_const_as_generic, db::HirDatabase, infer::unify::InferenceTable, primitive,
to_assoc_type_id, to_chalk_trait_id, utils::generics, Binders, BoundVar, CallableSig, to_assoc_type_id, to_chalk_trait_id, utils::generics, Binders, BoundVar, CallableSig,
GenericArg, Interner, ProjectionTy, Substitution, TraitRef, Ty, TyDefId, TyExt, TyKind, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution, TraitRef, Ty, TyDefId, TyExt,
TyKind,
}; };
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
@ -79,9 +80,9 @@ impl<D> TyBuilder<D> {
let expected_kind = &self.param_kinds[self.vec.len()]; let expected_kind = &self.param_kinds[self.vec.len()];
let arg_kind = match arg.data(Interner) { let arg_kind = match arg.data(Interner) {
chalk_ir::GenericArgData::Ty(_) => ParamKind::Type, GenericArgData::Ty(_) => ParamKind::Type,
chalk_ir::GenericArgData::Lifetime(_) => panic!("Got lifetime in TyBuilder::push"), GenericArgData::Lifetime(_) => panic!("Got lifetime in TyBuilder::push"),
chalk_ir::GenericArgData::Const(c) => { GenericArgData::Const(c) => {
let c = c.data(Interner); let c = c.data(Interner);
ParamKind::Const(c.ty.clone()) ParamKind::Const(c.ty.clone())
} }
@ -139,8 +140,8 @@ impl<D> TyBuilder<D> {
fn assert_match_kind(&self, a: &chalk_ir::GenericArg<Interner>, e: &ParamKind) { fn assert_match_kind(&self, a: &chalk_ir::GenericArg<Interner>, e: &ParamKind) {
match (a.data(Interner), e) { match (a.data(Interner), e) {
(chalk_ir::GenericArgData::Ty(_), ParamKind::Type) (GenericArgData::Ty(_), ParamKind::Type)
| (chalk_ir::GenericArgData::Const(_), ParamKind::Const(_)) => (), | (GenericArgData::Const(_), ParamKind::Const(_)) => (),
_ => panic!("Mismatched kinds: {a:?}, {:?}, {:?}", self.vec, self.param_kinds), _ => panic!("Mismatched kinds: {a:?}, {:?}, {:?}", self.vec, self.param_kinds),
} }
} }

View file

@ -1,7 +1,7 @@
//! Constant evaluation details //! Constant evaluation details
use base_db::CrateId; use base_db::CrateId;
use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData}; use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex};
use hir_def::{ use hir_def::{
hir::Expr, hir::Expr,
path::Path, path::Path,
@ -120,7 +120,7 @@ pub fn unknown_const(ty: Ty) -> Const {
} }
pub fn unknown_const_as_generic(ty: Ty) -> GenericArg { pub fn unknown_const_as_generic(ty: Ty) -> GenericArg {
GenericArgData::Const(unknown_const(ty)).intern(Interner) unknown_const(ty).cast(Interner)
} }
/// Interns a constant scalar with the given type /// Interns a constant scalar with the given type
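
Note on this hunk and several later ones: explicit `GenericArgData::…(x).intern(Interner)` constructions are replaced with chalk's `Cast` helper. A hedged sketch of the equivalence, assuming hir-ty's public `Interner`, `Ty` and `GenericArg` re-exports:

use chalk_ir::{cast::Cast, GenericArgData};
use hir_ty::{GenericArg, Interner, Ty};

fn ty_to_generic_arg(ty: Ty) -> GenericArg {
    // Both lines build the same GenericArg; the diff prefers the second form.
    let _verbose: GenericArg = GenericArgData::Ty(ty.clone()).intern(Interner);
    ty.cast(Interner)
}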

View file

@ -1203,6 +1203,27 @@ fn destructing_assignment() {
"#, "#,
5, 5,
); );
check_number(
r#"
const GOAL: u8 = {
let (mut a, mut b) = (2, 5);
(a, b) = (b, a);
a * 10 + b
};
"#,
52,
);
check_number(
r#"
struct Point { x: i32, y: i32 }
const GOAL: i32 = {
let mut p = Point { x: 5, y: 6 };
(p.x, _) = (p.y, p.x);
p.x * 10 + p.y
};
"#,
66,
);
} }
#[test] #[test]
@ -1432,6 +1453,30 @@ fn from_trait() {
); );
} }
#[test]
fn closure_clone() {
check_number(
r#"
//- minicore: clone, fn
struct S(u8);
impl Clone for S(u8) {
fn clone(&self) -> S {
S(self.0 + 5)
}
}
const GOAL: u8 = {
let s = S(3);
let cl = move || s;
let cl = cl.clone();
cl().0
}
"#,
8,
);
}
#[test] #[test]
fn builtin_derive_macro() { fn builtin_derive_macro() {
check_number( check_number(
@ -2396,14 +2441,14 @@ fn const_loop() {
fn const_transfer_memory() { fn const_transfer_memory() {
check_number( check_number(
r#" r#"
//- minicore: slice, index, coerce_unsized //- minicore: slice, index, coerce_unsized, option
const A1: &i32 = &1; const A1: &i32 = &1;
const A2: &i32 = &10; const A2: &i32 = &10;
const A3: [&i32; 3] = [&1, &2, &100]; const A3: [&i32; 3] = [&1, &2, &100];
const A4: (i32, &i32) = (1, &1000); const A4: (i32, &i32, Option<&i32>) = (1, &1000, Some(&10000));
const GOAL: i32 = *A1 + *A2 + *A3[2] + *A4.1; const GOAL: i32 = *A1 + *A2 + *A3[2] + *A4.1 + *A4.2.unwrap_or(&5);
"#, "#,
1111, 11111,
); );
} }

View file

@ -75,7 +75,7 @@ fn walk_unsafe(
Expr::Path(path) => { Expr::Path(path) => {
let resolver = resolver_for_expr(db.upcast(), def, current); let resolver = resolver_for_expr(db.upcast(), def, current);
let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path); let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path);
if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial { if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial {
if db.static_data(id).mutable { if db.static_data(id).mutable {
unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block }); unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
} }

View file

@ -1809,6 +1809,25 @@ impl HirDisplay for Path {
} }
} }
// Convert trait's `Self` bound back to the surface syntax. Note there is no associated
// trait, so there can only be one path segment that `has_self_type`. The `Self` type
// itself can contain further qualified paths, though, which will be handled by recursive
// `hir_fmt`s.
//
// `trait_mod::Trait<Self = type_mod::Type, Args>::Assoc`
// =>
// `<type_mod::Type as trait_mod::Trait<Args>>::Assoc`
let trait_self_ty = self.segments().iter().find_map(|seg| {
let generic_args = seg.args_and_bindings?;
generic_args.has_self_type.then(|| &generic_args.args[0])
});
if let Some(ty) = trait_self_ty {
write!(f, "<")?;
ty.hir_fmt(f)?;
write!(f, " as ")?;
// Now format the path of the trait...
}
for (seg_idx, segment) in self.segments().iter().enumerate() { for (seg_idx, segment) in self.segments().iter().enumerate() {
if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 { if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 {
write!(f, "::")?; write!(f, "::")?;
@ -1840,15 +1859,12 @@ impl HirDisplay for Path {
return Ok(()); return Ok(());
} }
write!(f, "<")?;
let mut first = true; let mut first = true;
for arg in generic_args.args.iter() { // Skip the `Self` bound if exists. It's handled outside the loop.
for arg in &generic_args.args[generic_args.has_self_type as usize..] {
if first { if first {
first = false; first = false;
if generic_args.has_self_type { write!(f, "<")?;
// FIXME: Convert to `<Ty as Trait>` form.
write!(f, "Self = ")?;
}
} else { } else {
write!(f, ", ")?; write!(f, ", ")?;
} }
@ -1857,6 +1873,7 @@ impl HirDisplay for Path {
for binding in generic_args.bindings.iter() { for binding in generic_args.bindings.iter() {
if first { if first {
first = false; first = false;
write!(f, "<")?;
} else { } else {
write!(f, ", ")?; write!(f, ", ")?;
} }
@ -1872,9 +1889,20 @@ impl HirDisplay for Path {
} }
} }
} }
write!(f, ">")?;
// There may be no generic arguments to print, in case of a trait having only a
// single `Self` bound which is converted to `<Ty as Trait>::Assoc`.
if !first {
write!(f, ">")?;
}
// Current position: `<Ty as Trait<Args>|`
if generic_args.has_self_type {
write!(f, ">")?;
}
} }
} }
Ok(()) Ok(())
} }
} }
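
Note on the `HirDisplay` change above: a `Self` type argument is now rendered as a fully qualified path instead of the invented `Trait<Self = Ty>` form. A stand-alone example of the surface syntax being targeted; the module and type names are placeholders echoing the comment, not identifiers from the commit:

mod trait_mod {
    pub trait Trait<Args> {
        type Assoc;
    }
}
mod type_mod {
    pub struct Type;
}

impl trait_mod::Trait<u8> for type_mod::Type {
    type Assoc = u32;
}

// Old rendering (not valid surface syntax): trait_mod::Trait<Self = type_mod::Type, u8>::Assoc
// New rendering, which compiles as written:
type Rendered = <type_mod::Type as trait_mod::Trait<u8>>::Assoc;

fn main() {
    let _assoc: Rendered = 0u32;
}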

View file

@ -1017,7 +1017,7 @@ impl<'a> InferenceContext<'a> {
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()); let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
let (resolution, unresolved) = if value_ns { let (resolution, unresolved) = if value_ns {
match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path) { match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path) {
Some(ResolveValueResult::ValueNs(value)) => match value { Some(ResolveValueResult::ValueNs(value, _)) => match value {
ValueNs::EnumVariantId(var) => { ValueNs::EnumVariantId(var) => {
let substs = ctx.substs_from_path(path, var.into(), true); let substs = ctx.substs_from_path(path, var.into(), true);
let ty = self.db.ty(var.parent.into()); let ty = self.db.ty(var.parent.into());
@ -1033,12 +1033,14 @@ impl<'a> InferenceContext<'a> {
ValueNs::ImplSelf(impl_id) => (TypeNs::SelfType(impl_id), None), ValueNs::ImplSelf(impl_id) => (TypeNs::SelfType(impl_id), None),
_ => return (self.err_ty(), None), _ => return (self.err_ty(), None),
}, },
Some(ResolveValueResult::Partial(typens, unresolved)) => (typens, Some(unresolved)), Some(ResolveValueResult::Partial(typens, unresolved, _)) => {
(typens, Some(unresolved))
}
None => return (self.err_ty(), None), None => return (self.err_ty(), None),
} }
} else { } else {
match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) { match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) {
Some(it) => it, Some((it, idx, _)) => (it, idx),
None => return (self.err_ty(), None), None => return (self.err_ty(), None),
} }
}; };

View file

@ -322,7 +322,7 @@ impl InferenceContext<'_> {
Expr::Path(p) => { Expr::Path(p) => {
let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr); let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
if let Some(r) = resolver.resolve_path_in_value_ns(self.db.upcast(), p) { if let Some(r) = resolver.resolve_path_in_value_ns(self.db.upcast(), p) {
if let ResolveValueResult::ValueNs(v) = r { if let ResolveValueResult::ValueNs(v, _) = r {
if let ValueNs::LocalBinding(b) = v { if let ValueNs::LocalBinding(b) = v {
return Some(HirPlace { local: b, projections: vec![] }); return Some(HirPlace { local: b, projections: vec![] });
} }

View file

@ -5,9 +5,7 @@ use std::{
mem, mem,
}; };
use chalk_ir::{ use chalk_ir::{cast::Cast, fold::Shift, DebruijnIndex, Mutability, TyVariableKind};
cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind,
};
use hir_def::{ use hir_def::{
generics::TypeOrConstParamData, generics::TypeOrConstParamData,
hir::{ hir::{
@ -750,7 +748,7 @@ impl InferenceContext<'_> {
self.resolve_associated_type_with_params( self.resolve_associated_type_with_params(
self_ty, self_ty,
self.resolve_ops_index_output(), self.resolve_ops_index_output(),
&[GenericArgData::Ty(index_ty).intern(Interner)], &[index_ty.cast(Interner)],
) )
} else { } else {
self.err_ty() self.err_ty()
@ -1721,16 +1719,13 @@ impl InferenceContext<'_> {
for (id, data) in def_generics.iter().skip(substs.len()) { for (id, data) in def_generics.iter().skip(substs.len()) {
match data { match data {
TypeOrConstParamData::TypeParamData(_) => { TypeOrConstParamData::TypeParamData(_) => {
substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner)) substs.push(self.table.new_type_var().cast(Interner))
}
TypeOrConstParamData::ConstParamData(_) => {
substs.push(
GenericArgData::Const(self.table.new_const_var(
self.db.const_param_ty(ConstParamId::from_unchecked(id)),
))
.intern(Interner),
)
} }
TypeOrConstParamData::ConstParamData(_) => substs.push(
self.table
.new_const_var(self.db.const_param_ty(ConstParamId::from_unchecked(id)))
.cast(Interner),
),
} }
} }
assert_eq!(substs.len(), total_len); assert_eq!(substs.len(), total_len);

View file

@ -61,8 +61,8 @@ impl InferenceContext<'_> {
self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?; self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
match value_or_partial { match value_or_partial {
ResolveValueResult::ValueNs(it) => (it, None), ResolveValueResult::ValueNs(it, _) => (it, None),
ResolveValueResult::Partial(def, remaining_index) => self ResolveValueResult::Partial(def, remaining_index, _) => self
.resolve_assoc_item(def, path, remaining_index, id) .resolve_assoc_item(def, path, remaining_index, id)
.map(|(it, substs)| (it, Some(substs)))?, .map(|(it, substs)| (it, Some(substs)))?,
} }

View file

@ -10,7 +10,6 @@ use chalk_solve::infer::ParameterEnaVariableExt;
use either::Either; use either::Either;
use ena::unify::UnifyKey; use ena::unify::UnifyKey;
use hir_expand::name; use hir_expand::name;
use stdx::never;
use triomphe::Arc; use triomphe::Arc;
use super::{InferOk, InferResult, InferenceContext, TypeError}; use super::{InferOk, InferResult, InferenceContext, TypeError};
@ -92,15 +91,10 @@ pub(crate) fn unify(
let vars = Substitution::from_iter( let vars = Substitution::from_iter(
Interner, Interner,
tys.binders.iter(Interner).map(|it| match &it.kind { tys.binders.iter(Interner).map(|it| match &it.kind {
chalk_ir::VariableKind::Ty(_) => { chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner),
GenericArgData::Ty(table.new_type_var()).intern(Interner) // FIXME: maybe wrong?
} chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner),
chalk_ir::VariableKind::Lifetime => { chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner),
GenericArgData::Ty(table.new_type_var()).intern(Interner)
} // FIXME: maybe wrong?
chalk_ir::VariableKind::Const(ty) => {
GenericArgData::Const(table.new_const_var(ty.clone())).intern(Interner)
}
}), }),
); );
let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner); let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
@ -111,10 +105,10 @@ pub(crate) fn unify(
// default any type vars that weren't unified back to their original bound vars // default any type vars that weren't unified back to their original bound vars
// (kind of hacky) // (kind of hacky)
let find_var = |iv| { let find_var = |iv| {
vars.iter(Interner).position(|v| match v.interned() { vars.iter(Interner).position(|v| match v.data(Interner) {
chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner), GenericArgData::Ty(ty) => ty.inference_var(Interner),
chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner), GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner), GenericArgData::Const(c) => c.inference_var(Interner),
} == Some(iv)) } == Some(iv))
}; };
let fallback = |iv, kind, default, binder| match kind { let fallback = |iv, kind, default, binder| match kind {
@ -149,6 +143,9 @@ pub(crate) struct InferenceTable<'a> {
var_unification_table: ChalkInferenceTable, var_unification_table: ChalkInferenceTable,
type_variable_table: Vec<TypeVariableFlags>, type_variable_table: Vec<TypeVariableFlags>,
pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>, pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
/// Double buffer used in [`Self::resolve_obligations_as_possible`] to cut down on
/// temporary allocations.
resolve_obligations_buffer: Vec<Canonicalized<InEnvironment<Goal>>>,
} }
pub(crate) struct InferenceTableSnapshot { pub(crate) struct InferenceTableSnapshot {
@ -165,6 +162,7 @@ impl<'a> InferenceTable<'a> {
var_unification_table: ChalkInferenceTable::new(), var_unification_table: ChalkInferenceTable::new(),
type_variable_table: Vec::new(), type_variable_table: Vec::new(),
pending_obligations: Vec::new(), pending_obligations: Vec::new(),
resolve_obligations_buffer: Vec::new(),
} }
} }
@ -516,10 +514,10 @@ impl<'a> InferenceTable<'a> {
pub(crate) fn resolve_obligations_as_possible(&mut self) { pub(crate) fn resolve_obligations_as_possible(&mut self) {
let _span = profile::span("resolve_obligations_as_possible"); let _span = profile::span("resolve_obligations_as_possible");
let mut changed = true; let mut changed = true;
let mut obligations = Vec::new(); let mut obligations = mem::take(&mut self.resolve_obligations_buffer);
while changed { while mem::take(&mut changed) {
changed = false;
mem::swap(&mut self.pending_obligations, &mut obligations); mem::swap(&mut self.pending_obligations, &mut obligations);
for canonicalized in obligations.drain(..) { for canonicalized in obligations.drain(..) {
if !self.check_changed(&canonicalized) { if !self.check_changed(&canonicalized) {
self.pending_obligations.push(canonicalized); self.pending_obligations.push(canonicalized);
@ -534,6 +532,8 @@ impl<'a> InferenceTable<'a> {
self.register_obligation_in_env(uncanonical); self.register_obligation_in_env(uncanonical);
} }
} }
self.resolve_obligations_buffer = obligations;
self.resolve_obligations_buffer.clear();
} }
pub(crate) fn fudge_inference<T: TypeFoldable<Interner>>( pub(crate) fn fudge_inference<T: TypeFoldable<Interner>>(
@ -611,9 +611,9 @@ impl<'a> InferenceTable<'a> {
fn check_changed(&mut self, canonicalized: &Canonicalized<InEnvironment<Goal>>) -> bool { fn check_changed(&mut self, canonicalized: &Canonicalized<InEnvironment<Goal>>) -> bool {
canonicalized.free_vars.iter().any(|var| { canonicalized.free_vars.iter().any(|var| {
let iv = match var.data(Interner) { let iv = match var.data(Interner) {
chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner), GenericArgData::Ty(ty) => ty.inference_var(Interner),
chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner), GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner), GenericArgData::Const(c) => c.inference_var(Interner),
} }
.expect("free var is not inference var"); .expect("free var is not inference var");
if self.var_unification_table.probe_var(iv).is_some() { if self.var_unification_table.probe_var(iv).is_some() {
@ -690,14 +690,10 @@ impl<'a> InferenceTable<'a> {
.fill(|it| { .fill(|it| {
let arg = match it { let arg = match it {
ParamKind::Type => self.new_type_var(), ParamKind::Type => self.new_type_var(),
ParamKind::Const(ty) => { ParamKind::Const(_) => unreachable!("Tuple with const parameter"),
never!("Tuple with const parameter");
return GenericArgData::Const(self.new_const_var(ty.clone()))
.intern(Interner);
}
}; };
arg_tys.push(arg.clone()); arg_tys.push(arg.clone());
GenericArgData::Ty(arg).intern(Interner) arg.cast(Interner)
}) })
.build(); .build();
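
Note on the changes above: `resolve_obligations_as_possible` now reuses a scratch `Vec` stored on the table (`resolve_obligations_buffer`) instead of allocating a fresh one on every call. A self-contained sketch of that double-buffer worklist pattern, with plain integers standing in for the real canonicalized goals:

use std::mem;

struct Table {
    pending: Vec<u32>,
    scratch: Vec<u32>, // kept around only so its allocation is reused
}

impl Table {
    fn resolve_obligations_as_possible(&mut self) {
        let mut obligations = mem::take(&mut self.scratch);
        let mut changed = true;
        while mem::take(&mut changed) {
            // Process everything pending at the start of the round; anything
            // queued during the round is picked up by the next iteration.
            mem::swap(&mut self.pending, &mut obligations);
            for ob in obligations.drain(..) {
                if ob == 0 {
                    // No progress possible: keep it pending for a later call.
                    self.pending.push(ob);
                } else {
                    // Progress was made: requeue the reduced obligation and loop again.
                    changed = true;
                    self.pending.push(ob - 1);
                }
            }
        }
        // Hand the (now empty) buffer back so its capacity survives the call.
        self.scratch = obligations;
        self.scratch.clear();
    }
}

fn main() {
    let mut table = Table { pending: vec![3, 0, 2], scratch: Vec::new() };
    table.resolve_obligations_as_possible();
    assert_eq!(table.pending, vec![0, 0, 0]);
}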

View file

@ -52,12 +52,14 @@ use hir_expand::name;
use la_arena::{Arena, Idx}; use la_arena::{Arena, Idx};
use mir::{MirEvalError, VTableMap}; use mir::{MirEvalError, VTableMap};
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use syntax::ast::{make, ConstArg};
use traits::FnTrait; use traits::FnTrait;
use triomphe::Arc; use triomphe::Arc;
use utils::Generics; use utils::Generics;
use crate::{ use crate::{
consteval::unknown_const, db::HirDatabase, infer::unify::InferenceTable, utils::generics, consteval::unknown_const, db::HirDatabase, display::HirDisplay, infer::unify::InferenceTable,
utils::generics,
}; };
pub use autoderef::autoderef; pub use autoderef::autoderef;
@ -719,3 +721,16 @@ where
value.visit_with(&mut collector, DebruijnIndex::INNERMOST); value.visit_with(&mut collector, DebruijnIndex::INNERMOST);
collector.placeholders.into_iter().collect() collector.placeholders.into_iter().collect()
} }
pub fn known_const_to_ast(konst: &Const, db: &dyn HirDatabase) -> Option<ConstArg> {
if let ConstValue::Concrete(c) = &konst.interned().value {
match c.interned {
ConstScalar::UnevaluatedConst(GeneralConstId::InTypeConstId(cid), _) => {
return Some(cid.source(db.upcast()));
}
ConstScalar::Unknown => return None,
_ => (),
}
}
Some(make::expr_const_value(konst.display(db).to_string().as_str()))
}

View file

@ -58,10 +58,9 @@ use crate::{
InTypeConstIdMetadata, InTypeConstIdMetadata,
}, },
AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy, AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, ConstScalar, DebruijnIndex, DynTy,
FnPointer, FnSig, FnSubst, GenericArgData, ImplTraitId, Interner, ParamKind, PolyFnSig, FnPointer, FnSig, FnSubst, ImplTraitId, Interner, ParamKind, PolyFnSig, ProjectionTy,
ProjectionTy, QuantifiedWhereClause, QuantifiedWhereClauses, ReturnTypeImplTrait, QuantifiedWhereClause, QuantifiedWhereClauses, ReturnTypeImplTrait, ReturnTypeImplTraits,
ReturnTypeImplTraits, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
TyKind, WhereClause,
}; };
#[derive(Debug)] #[derive(Debug)]
@ -213,6 +212,19 @@ impl<'a> TyLoweringContext<'a> {
self.lower_ty_ext(type_ref).0 self.lower_ty_ext(type_ref).0
} }
pub fn lower_const(&self, const_ref: &ConstRef, const_type: Ty) -> Const {
const_or_path_to_chalk(
self.db,
self.resolver,
self.owner,
const_type,
const_ref,
self.type_param_mode,
|| self.generics(),
self.in_binders,
)
}
fn generics(&self) -> Generics { fn generics(&self) -> Generics {
generics( generics(
self.db.upcast(), self.db.upcast(),
@ -242,17 +254,7 @@ impl<'a> TyLoweringContext<'a> {
} }
TypeRef::Array(inner, len) => { TypeRef::Array(inner, len) => {
let inner_ty = self.lower_ty(inner); let inner_ty = self.lower_ty(inner);
let const_len = const_or_path_to_chalk( let const_len = self.lower_const(len, TyBuilder::usize());
self.db,
self.resolver,
self.owner,
TyBuilder::usize(),
len,
self.type_param_mode,
|| self.generics(),
self.in_binders,
);
TyKind::Array(inner_ty, const_len).intern(Interner) TyKind::Array(inner_ty, const_len).intern(Interner)
} }
TypeRef::Slice(inner) => { TypeRef::Slice(inner) => {
@ -391,11 +393,9 @@ impl<'a> TyLoweringContext<'a> {
let ty = { let ty = {
let macro_call = macro_call.to_node(self.db.upcast()); let macro_call = macro_call.to_node(self.db.upcast());
let resolver = |path| { let resolver = |path| {
self.resolver.resolve_path_as_macro( self.resolver
self.db.upcast(), .resolve_path_as_macro(self.db.upcast(), &path, Some(MacroSubNs::Bang))
&path, .map(|(it, _)| it)
Some(MacroSubNs::Bang),
)
}; };
match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call, resolver) match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call, resolver)
{ {
@ -447,7 +447,7 @@ impl<'a> TyLoweringContext<'a> {
return None; return None;
} }
let resolution = match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) { let resolution = match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) {
Some((it, None)) => it, Some((it, None, _)) => it,
_ => return None, _ => return None,
}; };
match resolution { match resolution {
@ -627,7 +627,7 @@ impl<'a> TyLoweringContext<'a> {
return self.lower_ty_relative_path(ty, res, path.segments()); return self.lower_ty_relative_path(ty, res, path.segments());
} }
let (resolution, remaining_index) = let (resolution, remaining_index, _) =
match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) { match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) {
Some(it) => it, Some(it) => it,
None => return (TyKind::Error.intern(Interner), None), None => return (TyKind::Error.intern(Interner), None),
@ -847,18 +847,7 @@ impl<'a> TyLoweringContext<'a> {
arg, arg,
&mut (), &mut (),
|_, type_ref| self.lower_ty(type_ref), |_, type_ref| self.lower_ty(type_ref),
|_, c, ty| { |_, const_ref, ty| self.lower_const(const_ref, ty),
const_or_path_to_chalk(
self.db,
self.resolver,
self.owner,
ty,
c,
self.type_param_mode,
|| self.generics(),
self.in_binders,
)
},
) { ) {
had_explicit_args = true; had_explicit_args = true;
substs.push(x); substs.push(x);
@ -1604,24 +1593,35 @@ pub(crate) fn generic_defaults_query(
.iter() .iter()
.enumerate() .enumerate()
.map(|(idx, (id, p))| { .map(|(idx, (id, p))| {
let p = match p { match p {
TypeOrConstParamData::TypeParamData(p) => p, TypeOrConstParamData::TypeParamData(p) => {
TypeOrConstParamData::ConstParamData(_) => { let mut ty = p
// FIXME: implement const generic defaults .default
let val = unknown_const_as_generic( .as_ref()
db.const_param_ty(ConstParamId::from_unchecked(id)), .map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
); // Each default can only refer to previous parameters.
return make_binders(db, &generic_params, val); // Type variable default referring to parameter coming
// after it is forbidden (FIXME: report diagnostic)
ty = fallback_bound_vars(ty, idx, parent_start_idx);
crate::make_binders(db, &generic_params, ty.cast(Interner))
} }
}; TypeOrConstParamData::ConstParamData(p) => {
let mut ty = let mut val = p.default.as_ref().map_or_else(
p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t)); || {
unknown_const_as_generic(
// Each default can only refer to previous parameters. db.const_param_ty(ConstParamId::from_unchecked(id)),
// Type variable default referring to parameter coming )
// after it is forbidden (FIXME: report diagnostic) },
ty = fallback_bound_vars(ty, idx, parent_start_idx); |c| {
crate::make_binders(db, &generic_params, ty.cast(Interner)) let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
c.cast(Interner)
},
);
// Each default can only refer to previous parameters, see above.
val = fallback_bound_vars(val, idx, parent_start_idx);
make_binders(db, &generic_params, val)
}
}
}) })
// FIXME: use `Arc::from_iter` when it becomes available // FIXME: use `Arc::from_iter` when it becomes available
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
@ -1643,9 +1643,7 @@ pub(crate) fn generic_defaults_recover(
.iter_id() .iter_id()
.map(|id| { .map(|id| {
let val = match id { let val = match id {
Either::Left(_) => { Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner),
GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
}
Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)), Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
}; };
crate::make_binders(db, &generic_params, val) crate::make_binders(db, &generic_params, val)
@ -1991,16 +1989,9 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
} }
}; };
Some(match (arg, kind) { Some(match (arg, kind) {
(GenericArg::Type(type_ref), ParamKind::Type) => { (GenericArg::Type(type_ref), ParamKind::Type) => for_type(this, type_ref).cast(Interner),
let ty = for_type(this, type_ref); (GenericArg::Const(c), ParamKind::Const(c_ty)) => for_const(this, c, c_ty).cast(Interner),
GenericArgData::Ty(ty).intern(Interner) (GenericArg::Const(_), ParamKind::Type) => TyKind::Error.intern(Interner).cast(Interner),
}
(GenericArg::Const(c), ParamKind::Const(c_ty)) => {
GenericArgData::Const(for_const(this, c, c_ty)).intern(Interner)
}
(GenericArg::Const(_), ParamKind::Type) => {
GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
}
(GenericArg::Type(t), ParamKind::Const(c_ty)) => { (GenericArg::Type(t), ParamKind::Const(c_ty)) => {
// We want to recover simple idents, which parser detects them // We want to recover simple idents, which parser detects them
// as types. Maybe here is not the best place to do it, but // as types. Maybe here is not the best place to do it, but
@ -2010,9 +2001,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
if p.kind == PathKind::Plain { if p.kind == PathKind::Plain {
if let [n] = p.segments() { if let [n] = p.segments() {
let c = ConstRef::Path(n.clone()); let c = ConstRef::Path(n.clone());
return Some( return Some(for_const(this, &c, c_ty).cast(Interner));
GenericArgData::Const(for_const(this, &c, c_ty)).intern(Interner),
);
} }
} }
} }

View file

@ -10,7 +10,7 @@ use std::{
}; };
use base_db::{CrateId, FileId}; use base_db::{CrateId, FileId};
use chalk_ir::Mutability; use chalk_ir::{cast::Cast, Mutability};
use either::Either; use either::Either;
use hir_def::{ use hir_def::{
builtin_type::BuiltinType, builtin_type::BuiltinType,
@ -40,8 +40,8 @@ use crate::{
name, static_lifetime, name, static_lifetime,
traits::FnTrait, traits::FnTrait,
utils::{detect_variant_from_bytes, ClosureSubst}, utils::{detect_variant_from_bytes, ClosureSubst},
CallableDefId, ClosureId, Const, ConstScalar, FnDefId, GenericArgData, Interner, MemoryMap, CallableDefId, ClosureId, Const, ConstScalar, FnDefId, Interner, MemoryMap, Substitution,
Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
}; };
use super::{ use super::{
@ -2007,7 +2007,28 @@ impl Evaluator<'_> {
} }
} }
AdtId::UnionId(_) => (), AdtId::UnionId(_) => (),
AdtId::EnumId(_) => (), AdtId::EnumId(e) => {
if let Some((variant, layout)) = detect_variant_from_bytes(
&layout,
self.db,
self.trait_env.clone(),
self.read_memory(addr, layout.size.bytes_usize())?,
e,
) {
let ev = EnumVariantId { parent: e, local_id: variant };
for (i, (_, ty)) in self.db.field_types(ev.into()).iter().enumerate() {
let offset = layout.fields.offset(i).bytes_usize();
let ty = ty.clone().substitute(Interner, subst);
self.patch_addresses(
patch_map,
old_vtable,
addr.offset(offset),
&ty,
locals,
)?;
}
}
}
}, },
TyKind::Tuple(_, subst) => { TyKind::Tuple(_, subst) => {
for (id, ty) in subst.iter(Interner).enumerate() { for (id, ty) in subst.iter(Interner).enumerate() {
@ -2248,7 +2269,7 @@ impl Evaluator<'_> {
interval: args_for_target[0].interval.slice(0..self.ptr_size()), interval: args_for_target[0].interval.slice(0..self.ptr_size()),
ty: ty.clone(), ty: ty.clone(),
}; };
let ty = GenericArgData::Ty(ty.clone()).intern(Interner); let ty = ty.clone().cast(Interner);
let generics_for_target = Substitution::from_iter( let generics_for_target = Substitution::from_iter(
Interner, Interner,
generic_args.iter(Interner).enumerate().map(|(i, it)| { generic_args.iter(Interner).enumerate().map(|(i, it)| {

View file

@ -136,7 +136,10 @@ impl Evaluator<'_> {
not_supported!("wrong generic arg kind for clone"); not_supported!("wrong generic arg kind for clone");
}; };
// Clone has special impls for tuples and function pointers // Clone has special impls for tuples and function pointers
if matches!(self_ty.kind(Interner), TyKind::Function(_) | TyKind::Tuple(..)) { if matches!(
self_ty.kind(Interner),
TyKind::Function(_) | TyKind::Tuple(..) | TyKind::Closure(..)
) {
self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?; self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?;
return Ok(true); return Ok(true);
} }
@ -167,32 +170,26 @@ impl Evaluator<'_> {
return destination return destination
.write_from_interval(self, Interval { addr, size: destination.size }); .write_from_interval(self, Interval { addr, size: destination.size });
} }
TyKind::Closure(id, subst) => {
let [arg] = args else {
not_supported!("wrong arg count for clone");
};
let addr = Address::from_bytes(arg.get(self)?)?;
let (closure_owner, _) = self.db.lookup_intern_closure((*id).into());
let infer = self.db.infer(closure_owner);
let (captures, _) = infer.closure_info(id);
let layout = self.layout(&self_ty)?;
let ty_iter = captures.iter().map(|c| c.ty(subst));
self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
}
TyKind::Tuple(_, subst) => { TyKind::Tuple(_, subst) => {
let [arg] = args else { let [arg] = args else {
not_supported!("wrong arg count for clone"); not_supported!("wrong arg count for clone");
}; };
let addr = Address::from_bytes(arg.get(self)?)?; let addr = Address::from_bytes(arg.get(self)?)?;
let layout = self.layout(&self_ty)?; let layout = self.layout(&self_ty)?;
for (i, ty) in subst.iter(Interner).enumerate() { let ty_iter = subst.iter(Interner).map(|ga| ga.assert_ty_ref(Interner).clone());
let ty = ty.assert_ty_ref(Interner); self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
let size = self.layout(ty)?.size.bytes_usize();
let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
let arg = IntervalAndTy {
interval: Interval { addr: tmp, size: self.ptr_size() },
ty: TyKind::Ref(Mutability::Not, static_lifetime(), ty.clone())
.intern(Interner),
};
let offset = layout.fields.offset(i).bytes_usize();
self.write_memory(tmp, &addr.offset(offset).to_bytes())?;
self.exec_clone(
def,
&[arg],
ty.clone(),
locals,
destination.slice(offset..offset + size),
span,
)?;
}
} }
_ => { _ => {
self.exec_fn_with_args( self.exec_fn_with_args(
@ -209,6 +206,37 @@ impl Evaluator<'_> {
Ok(()) Ok(())
} }
fn exec_clone_for_fields(
&mut self,
ty_iter: impl Iterator<Item = Ty>,
layout: Arc<Layout>,
addr: Address,
def: FunctionId,
locals: &Locals,
destination: Interval,
span: MirSpan,
) -> Result<()> {
for (i, ty) in ty_iter.enumerate() {
let size = self.layout(&ty)?.size.bytes_usize();
let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
let arg = IntervalAndTy {
interval: Interval { addr: tmp, size: self.ptr_size() },
ty: TyKind::Ref(Mutability::Not, static_lifetime(), ty.clone()).intern(Interner),
};
let offset = layout.fields.offset(i).bytes_usize();
self.write_memory(tmp, &addr.offset(offset).to_bytes())?;
self.exec_clone(
def,
&[arg],
ty,
locals,
destination.slice(offset..offset + size),
span,
)?;
}
Ok(())
}
fn exec_alloc_fn( fn exec_alloc_fn(
&mut self, &mut self,
alloc_fn: &str, alloc_fn: &str,
@ -473,6 +501,38 @@ impl Evaluator<'_> {
self.write_memory_using_ref(destination.addr, destination.size)?.fill(0); self.write_memory_using_ref(destination.addr, destination.size)?.fill(0);
Ok(()) Ok(())
} }
"getenv" => {
let [name] = args else {
return Err(MirEvalError::TypeError("libc::write args are not provided"));
};
let mut name_buf = vec![];
let name = {
let mut index = Address::from_bytes(name.get(self)?)?;
loop {
let byte = self.read_memory(index, 1)?[0];
index = index.offset(1);
if byte == 0 {
break;
}
name_buf.push(byte);
}
String::from_utf8_lossy(&name_buf)
};
let value = self.db.crate_graph()[self.crate_id].env.get(&name);
match value {
None => {
// Write a null pointer to signal failure
self.write_memory_using_ref(destination.addr, destination.size)?.fill(0);
}
Some(mut value) => {
value.push('\0');
let addr = self.heap_allocate(value.len(), 1)?;
self.write_memory(addr, value.as_bytes())?;
self.write_memory(destination.addr, &addr.to_bytes())?;
}
}
Ok(())
}
_ => not_supported!("unknown external function {as_str}"), _ => not_supported!("unknown external function {as_str}"),
} }
} }

View file

@ -729,6 +729,48 @@ fn main() {
) )
} }
#[test]
fn posix_getenv() {
check_pass(
r#"
//- /main.rs env:foo=bar
type c_char = u8;
extern "C" {
pub fn getenv(s: *const c_char) -> *mut c_char;
}
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn main() {
let result = getenv(b"foo\0" as *const _);
if *result != b'b' {
should_not_reach();
}
let result = (result as usize + 1) as *const c_char;
if *result != b'a' {
should_not_reach();
}
let result = (result as usize + 1) as *const c_char;
if *result != b'r' {
should_not_reach();
}
let result = (result as usize + 1) as *const c_char;
if *result != 0 {
should_not_reach();
}
let result = getenv(b"not found\0" as *const _);
if result as usize != 0 {
should_not_reach();
}
}
"#,
);
}
#[test] #[test]
fn posix_tls() { fn posix_tls() {
check_pass( check_pass(

View file

@ -15,7 +15,7 @@ use hir_def::{
path::Path, path::Path,
resolver::{resolver_for_expr, HasResolver, ResolveValueResult, ValueNs}, resolver::{resolver_for_expr, HasResolver, ResolveValueResult, ValueNs},
AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId, AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId,
TraitId, TypeOrConstParamId, Lookup, TraitId, TypeOrConstParamId,
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
use la_arena::ArenaMap; use la_arena::ArenaMap;
@ -372,7 +372,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
match &self.body.exprs[expr_id] { match &self.body.exprs[expr_id] {
Expr::Missing => { Expr::Missing => {
if let DefWithBodyId::FunctionId(f) = self.owner { if let DefWithBodyId::FunctionId(f) = self.owner {
let assoc = self.db.lookup_intern_function(f); let assoc = f.lookup(self.db.upcast());
if let ItemContainerId::TraitId(t) = assoc.container { if let ItemContainerId::TraitId(t) = assoc.container {
let name = &self.db.function_data(f).name; let name = &self.db.function_data(f).name;
return Err(MirLowerError::TraitFunctionDefinition(t, name.clone())); return Err(MirLowerError::TraitFunctionDefinition(t, name.clone()));
@ -1244,6 +1244,41 @@ impl<'ctx> MirLowerCtx<'ctx> {
} }
} }
fn lower_destructing_assignment(
&mut self,
mut current: BasicBlockId,
lhs: ExprId,
rhs: Place,
span: MirSpan,
) -> Result<Option<BasicBlockId>> {
match &self.body.exprs[lhs] {
Expr::Tuple { exprs, is_assignee_expr: _ } => {
for (i, expr) in exprs.iter().enumerate() {
let Some(c) = self.lower_destructing_assignment(
current,
*expr,
rhs.project(ProjectionElem::TupleOrClosureField(i)),
span,
)? else {
return Ok(None);
};
current = c;
}
Ok(Some(current))
}
Expr::Underscore => Ok(Some(current)),
_ => {
let Some((lhs_place, current)) =
self.lower_expr_as_place(current, lhs, false)?
else {
return Ok(None);
};
self.push_assignment(current, lhs_place, Operand::Copy(rhs).into(), span);
Ok(Some(current))
}
}
}
fn lower_assignment( fn lower_assignment(
&mut self, &mut self,
current: BasicBlockId, current: BasicBlockId,
@ -1259,6 +1294,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
if matches!(&self.body.exprs[lhs], Expr::Underscore) { if matches!(&self.body.exprs[lhs], Expr::Underscore) {
return Ok(Some(current)); return Ok(Some(current));
} }
if matches!(
&self.body.exprs[lhs],
Expr::Tuple { .. } | Expr::RecordLit { .. } | Expr::Call { .. }
) {
let temp = self.temp(self.expr_ty_after_adjustments(rhs), current, rhs.into())?;
let temp = Place::from(temp);
self.push_assignment(current, temp.clone(), rhs_op.into(), span);
return self.lower_destructing_assignment(current, lhs, temp, span);
}
let Some((lhs_place, current)) = let Some((lhs_place, current)) =
self.lower_expr_as_place(current, lhs, false)? self.lower_expr_as_place(current, lhs, false)?
else { else {
@ -1308,14 +1352,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
.resolve_path_in_value_ns(self.db.upcast(), c) .resolve_path_in_value_ns(self.db.upcast(), c)
.ok_or_else(unresolved_name)?; .ok_or_else(unresolved_name)?;
match pr { match pr {
ResolveValueResult::ValueNs(v) => { ResolveValueResult::ValueNs(v, _) => {
if let ValueNs::ConstId(c) = v { if let ValueNs::ConstId(c) = v {
self.lower_const_to_operand(Substitution::empty(Interner), c.into(), ty) self.lower_const_to_operand(Substitution::empty(Interner), c.into(), ty)
} else { } else {
not_supported!("bad path in range pattern"); not_supported!("bad path in range pattern");
} }
} }
ResolveValueResult::Partial(_, _) => { ResolveValueResult::Partial(_, _, _) => {
not_supported!("associated constants in range pattern") not_supported!("associated constants in range pattern")
} }
} }

View file

@ -323,7 +323,7 @@ impl MirLowerCtx<'_> {
break 'b (c, x.1); break 'b (c, x.1);
} }
} }
if let ResolveValueResult::ValueNs(v) = pr { if let ResolveValueResult::ValueNs(v, _) = pr {
if let ValueNs::ConstId(c) = v { if let ValueNs::ConstId(c) = v {
break 'b (c, Substitution::empty(Interner)); break 'b (c, Substitution::empty(Interner));
} }

View file

@ -3,18 +3,19 @@
use hir_def::{ use hir_def::{
attr::{AttrsWithOwner, Documentation}, attr::{AttrsWithOwner, Documentation},
item_scope::ItemInNs, item_scope::ItemInNs,
path::ModPath, path::{ModPath, Path},
resolver::HasResolver, per_ns::Namespace,
AttrDefId, GenericParamId, ModuleDefId, resolver::{HasResolver, Resolver, TypeNs},
AssocItemId, AttrDefId, GenericParamId, ModuleDefId,
}; };
use hir_expand::hygiene::Hygiene; use hir_expand::{hygiene::Hygiene, name::Name};
use hir_ty::db::HirDatabase; use hir_ty::db::HirDatabase;
use syntax::{ast, AstNode}; use syntax::{ast, AstNode};
use crate::{ use crate::{
Adt, AssocItem, Const, ConstParam, Enum, ExternCrateDecl, Field, Function, GenericParam, Impl, Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, Enum, ExternCrateDecl, Field,
LifetimeParam, Macro, Module, ModuleDef, Static, Struct, Trait, TraitAlias, TypeAlias, Function, GenericParam, Impl, LifetimeParam, Macro, Module, ModuleDef, Static, Struct, Trait,
TypeParam, Union, Variant, TraitAlias, TypeAlias, TypeParam, Union, Variant, VariantDef,
}; };
pub trait HasAttrs { pub trait HasAttrs {
@ -25,14 +26,14 @@ pub trait HasAttrs {
db: &dyn HirDatabase, db: &dyn HirDatabase,
link: &str, link: &str,
ns: Option<Namespace>, ns: Option<Namespace>,
) -> Option<ModuleDef>; ) -> Option<DocLinkDef>;
} }
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)] /// Subset of `ide_db::Definition` that doc links can resolve to.
pub enum Namespace { pub enum DocLinkDef {
Types, ModuleDef(ModuleDef),
Values, Field(Field),
Macros, SelfType(Trait),
} }
macro_rules! impl_has_attrs { macro_rules! impl_has_attrs {
@ -46,9 +47,14 @@ macro_rules! impl_has_attrs {
let def = AttrDefId::$def_id(self.into()); let def = AttrDefId::$def_id(self.into());
db.attrs(def).docs() db.attrs(def).docs()
} }
fn resolve_doc_path(self, db: &dyn HirDatabase, link: &str, ns: Option<Namespace>) -> Option<ModuleDef> { fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<Namespace>
) -> Option<DocLinkDef> {
let def = AttrDefId::$def_id(self.into()); let def = AttrDefId::$def_id(self.into());
resolve_doc_path(db, def, link, ns).map(ModuleDef::from) resolve_doc_path(db, def, link, ns)
} }
} }
)*}; )*};
@ -79,7 +85,12 @@ macro_rules! impl_has_attrs_enum {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> { fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
$enum::$variant(self).docs(db) $enum::$variant(self).docs(db)
} }
fn resolve_doc_path(self, db: &dyn HirDatabase, link: &str, ns: Option<Namespace>) -> Option<ModuleDef> { fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<Namespace>
) -> Option<DocLinkDef> {
$enum::$variant(self).resolve_doc_path(db, link, ns) $enum::$variant(self).resolve_doc_path(db, link, ns)
} }
} }
@ -111,7 +122,7 @@ impl HasAttrs for AssocItem {
db: &dyn HirDatabase, db: &dyn HirDatabase,
link: &str, link: &str,
ns: Option<Namespace>, ns: Option<Namespace>,
) -> Option<ModuleDef> { ) -> Option<DocLinkDef> {
match self { match self {
AssocItem::Function(it) => it.resolve_doc_path(db, link, ns), AssocItem::Function(it) => it.resolve_doc_path(db, link, ns),
AssocItem::Const(it) => it.resolve_doc_path(db, link, ns), AssocItem::Const(it) => it.resolve_doc_path(db, link, ns),
@ -147,9 +158,9 @@ impl HasAttrs for ExternCrateDecl {
db: &dyn HirDatabase, db: &dyn HirDatabase,
link: &str, link: &str,
ns: Option<Namespace>, ns: Option<Namespace>,
) -> Option<ModuleDef> { ) -> Option<DocLinkDef> {
let def = AttrDefId::ExternCrateId(self.into()); let def = AttrDefId::ExternCrateId(self.into());
resolve_doc_path(db, def, link, ns).map(ModuleDef::from) resolve_doc_path(db, def, link, ns)
} }
} }
@ -159,7 +170,7 @@ fn resolve_doc_path(
def: AttrDefId, def: AttrDefId,
link: &str, link: &str,
ns: Option<Namespace>, ns: Option<Namespace>,
) -> Option<ModuleDefId> { ) -> Option<DocLinkDef> {
let resolver = match def { let resolver = match def {
AttrDefId::ModuleId(it) => it.resolver(db.upcast()), AttrDefId::ModuleId(it) => it.resolver(db.upcast()),
AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()), AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()),
@ -184,8 +195,107 @@ fn resolve_doc_path(
.resolver(db.upcast()), .resolver(db.upcast()),
}; };
let modpath = { let mut modpath = modpath_from_str(db, link)?;
// FIXME: this is not how we should get a mod path here
let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
if resolved.is_none() {
let last_name = modpath.pop_segment()?;
resolve_assoc_or_field(db, resolver, modpath, last_name, ns)
} else {
let def = match ns {
Some(Namespace::Types) => resolved.take_types(),
Some(Namespace::Values) => resolved.take_values(),
Some(Namespace::Macros) => resolved.take_macros().map(ModuleDefId::MacroId),
None => resolved.iter_items().next().map(|(it, _)| match it {
ItemInNs::Types(it) => it,
ItemInNs::Values(it) => it,
ItemInNs::Macros(it) => ModuleDefId::MacroId(it),
}),
};
Some(DocLinkDef::ModuleDef(def?.into()))
}
}
fn resolve_assoc_or_field(
db: &dyn HirDatabase,
resolver: Resolver,
path: ModPath,
name: Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
let path = Path::from_known_path_with_no_generic(path);
// FIXME: This does not handle `Self` on trait definitions, which we should resolve to the
// trait itself.
let base_def = resolver.resolve_path_in_type_ns_fully(db.upcast(), &path)?;
let ty = match base_def {
TypeNs::SelfType(id) => Impl::from(id).self_ty(db),
TypeNs::GenericParam(_) => {
// Even if this generic parameter has some trait bounds, rustdoc doesn't
// resolve `name` to trait items.
return None;
}
TypeNs::AdtId(id) | TypeNs::AdtSelfType(id) => Adt::from(id).ty(db),
TypeNs::EnumVariantId(id) => {
// Enum variants don't have path candidates.
let variant = Variant::from(id);
return resolve_field(db, variant.into(), name, ns);
}
TypeNs::TypeAliasId(id) => {
let alias = TypeAlias::from(id);
if alias.as_assoc_item(db).is_some() {
// We don't normalize associated type aliases, so we have nothing to
// resolve `name` to.
return None;
}
alias.ty(db)
}
TypeNs::BuiltinType(id) => BuiltinType::from(id).ty(db),
TypeNs::TraitId(id) => {
// Doc paths in this context may only resolve to an item of this trait
// (i.e. no items of its supertraits), so we need to handle them here
// independently of others.
return db.trait_data(id).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
let def = match *assoc_id {
AssocItemId::FunctionId(it) => ModuleDef::Function(it.into()),
AssocItemId::ConstId(it) => ModuleDef::Const(it.into()),
AssocItemId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()),
};
DocLinkDef::ModuleDef(def)
});
}
TypeNs::TraitAliasId(_) => {
// XXX: Do these get resolved?
return None;
}
};
// FIXME: Resolve associated items here, e.g. `Option::map`. Note that associated items take
// precedence over fields.
let variant_def = match ty.as_adt()? {
Adt::Struct(it) => it.into(),
Adt::Union(it) => it.into(),
Adt::Enum(_) => return None,
};
resolve_field(db, variant_def, name, ns)
}
fn resolve_field(
db: &dyn HirDatabase,
def: VariantDef,
name: Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
if let Some(Namespace::Types | Namespace::Macros) = ns {
return None;
}
def.fields(db).into_iter().find(|f| f.name(db) == name).map(DocLinkDef::Field)
}
fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
// FIXME: this is not how we should get a mod path here.
let try_get_modpath = |link: &str| {
let ast_path = ast::SourceFile::parse(&format!("type T = {link};")) let ast_path = ast::SourceFile::parse(&format!("type T = {link};"))
.syntax_node() .syntax_node()
.descendants() .descendants()
@ -193,23 +303,20 @@ fn resolve_doc_path(
if ast_path.syntax().text() != link { if ast_path.syntax().text() != link {
return None; return None;
} }
ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())? ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())
}; };
let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath); let full = try_get_modpath(link);
let resolved = if resolved.is_none() { if full.is_some() {
resolver.resolve_module_path_in_trait_assoc_items(db.upcast(), &modpath)? return full;
} else {
resolved
};
match ns {
Some(Namespace::Types) => resolved.take_types(),
Some(Namespace::Values) => resolved.take_values(),
Some(Namespace::Macros) => resolved.take_macros().map(ModuleDefId::MacroId),
None => resolved.iter_items().next().map(|it| match it {
ItemInNs::Types(it) => it,
ItemInNs::Values(it) => it,
ItemInNs::Macros(it) => ModuleDefId::MacroId(it),
}),
} }
// Tuple field names cannot be a part of `ModPath` usually, but rustdoc can
// resolve doc paths like `TupleStruct::0`.
// FIXME: Find a better way to handle these.
let (base, maybe_tuple_field) = link.rsplit_once("::")?;
let tuple_field = Name::new_tuple_field(maybe_tuple_field.parse().ok()?);
let mut modpath = try_get_modpath(base)?;
modpath.push_segment(tuple_field);
Some(modpath)
} }

View file

@ -8,7 +8,6 @@ use hir_def::{
type_ref::{TypeBound, TypeRef}, type_ref::{TypeBound, TypeRef},
AdtId, GenericDefId, AdtId, GenericDefId,
}; };
use hir_expand::name;
use hir_ty::{ use hir_ty::{
display::{ display::{
write_bounds_like_dyn_trait_with_prefix, write_visibility, HirDisplay, HirDisplayError, write_bounds_like_dyn_trait_with_prefix, write_visibility, HirDisplay, HirDisplayError,
@ -19,8 +18,9 @@ use hir_ty::{
use crate::{ use crate::{
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl, Field, Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl, Field,
Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, Static, Struct, Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, SelfParam,
Trait, TraitAlias, TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant, Static, Struct, Trait, TraitAlias, TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam,
Union, Variant,
}; };
impl HirDisplay for Function { impl HirDisplay for Function {
@ -57,37 +57,21 @@ impl HirDisplay for Function {
f.write_char('(')?; f.write_char('(')?;
let write_self_param = |ty: &TypeRef, f: &mut HirFormatter<'_>| match ty {
TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner, TypeRef::Path(p) if p.is_self_type()) =>
{
f.write_char('&')?;
if let Some(lifetime) = lifetime {
write!(f, "{} ", lifetime.name.display(f.db.upcast()))?;
}
if let hir_def::type_ref::Mutability::Mut = mut_ {
f.write_str("mut ")?;
}
f.write_str("self")
}
_ => {
f.write_str("self: ")?;
ty.hir_fmt(f)
}
};
let mut first = true; let mut first = true;
let mut skip_self = 0;
if let Some(self_param) = self.self_param(db) {
self_param.hir_fmt(f)?;
first = false;
skip_self = 1;
}
// FIXME: Use resolved `param.ty` once we no longer discard lifetimes // FIXME: Use resolved `param.ty` once we no longer discard lifetimes
for (type_ref, param) in data.params.iter().zip(self.assoc_fn_params(db)) { for (type_ref, param) in data.params.iter().zip(self.assoc_fn_params(db)).skip(skip_self) {
let local = param.as_local(db).map(|it| it.name(db)); let local = param.as_local(db).map(|it| it.name(db));
if !first { if !first {
f.write_str(", ")?; f.write_str(", ")?;
} else { } else {
first = false; first = false;
if local == Some(name!(self)) {
write_self_param(type_ref, f)?;
continue;
}
} }
match local { match local {
Some(name) => write!(f, "{}: ", name.display(f.db.upcast()))?, Some(name) => write!(f, "{}: ", name.display(f.db.upcast()))?,
@ -137,6 +121,31 @@ impl HirDisplay for Function {
} }
} }
impl HirDisplay for SelfParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
let data = f.db.function_data(self.func);
let param = data.params.first().unwrap();
match &**param {
TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner, TypeRef::Path(p) if p.is_self_type()) =>
{
f.write_char('&')?;
if let Some(lifetime) = lifetime {
write!(f, "{} ", lifetime.name.display(f.db.upcast()))?;
}
if let hir_def::type_ref::Mutability::Mut = mut_ {
f.write_str("mut ")?;
}
f.write_str("self")
}
ty => {
f.write_str("self: ")?;
ty.hir_fmt(f)
}
}
}
}
impl HirDisplay for Adt { impl HirDisplay for Adt {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self { match self {
@ -357,6 +366,11 @@ fn write_generic_params(
delim(f)?; delim(f)?;
write!(f, "const {}: ", name.display(f.db.upcast()))?; write!(f, "const {}: ", name.display(f.db.upcast()))?;
c.ty.hir_fmt(f)?; c.ty.hir_fmt(f)?;
if let Some(default) = &c.default {
f.write_str(" = ")?;
write!(f, "{}", default.display(f.db.upcast()))?;
}
} }
} }
} }

View file

@ -63,12 +63,13 @@ use hir_ty::{
all_super_traits, autoderef, all_super_traits, autoderef,
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt}, consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
diagnostics::BodyValidationDiagnostic, diagnostics::BodyValidationDiagnostic,
known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, TagEncoding}, layout::{Layout as TyLayout, RustcEnumVariantIdx, TagEncoding},
method_resolution::{self, TyFingerprint}, method_resolution::{self, TyFingerprint},
mir::{self, interpret_mir}, mir::{self, interpret_mir},
primitive::UintTy, primitive::UintTy,
traits::FnTrait, traits::FnTrait,
AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution, GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, ValueTyDefId, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, ValueTyDefId,
WhereClause, WhereClause,
@ -87,7 +88,7 @@ use triomphe::Arc;
use crate::db::{DefDatabase, HirDatabase}; use crate::db::{DefDatabase, HirDatabase};
pub use crate::{ pub use crate::{
attrs::{HasAttrs, Namespace}, attrs::{DocLinkDef, HasAttrs},
diagnostics::{ diagnostics::{
AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode, AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode,
IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError, IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError,
@ -121,6 +122,7 @@ pub use {
lang_item::LangItem, lang_item::LangItem,
nameres::{DefMap, ModuleSource}, nameres::{DefMap, ModuleSource},
path::{ModPath, PathKind}, path::{ModPath, PathKind},
per_ns::Namespace,
type_ref::{Mutability, TypeRef}, type_ref::{Mutability, TypeRef},
visibility::Visibility, visibility::Visibility,
// FIXME: This is here since some queries take it as input that are used // FIXME: This is here since some queries take it as input that are used
@ -719,20 +721,18 @@ fn emit_def_diagnostic_(
) { ) {
match diag { match diag {
DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => { DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
let decl = declaration.to_node(db.upcast()); let decl = declaration.to_ptr(db.upcast());
acc.push( acc.push(
UnresolvedModule { UnresolvedModule {
decl: InFile::new(declaration.file_id, AstPtr::new(&decl)), decl: InFile::new(declaration.file_id, decl),
candidates: candidates.clone(), candidates: candidates.clone(),
} }
.into(), .into(),
) )
} }
DefDiagnosticKind::UnresolvedExternCrate { ast } => { DefDiagnosticKind::UnresolvedExternCrate { ast } => {
let item = ast.to_node(db.upcast()); let item = ast.to_ptr(db.upcast());
acc.push( acc.push(UnresolvedExternCrate { decl: InFile::new(ast.file_id, item) }.into());
UnresolvedExternCrate { decl: InFile::new(ast.file_id, AstPtr::new(&item)) }.into(),
);
} }
DefDiagnosticKind::UnresolvedImport { id, index } => { DefDiagnosticKind::UnresolvedImport { id, index } => {
@ -747,14 +747,10 @@ fn emit_def_diagnostic_(
} }
DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => { DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => {
let item = ast.to_node(db.upcast()); let item = ast.to_ptr(db.upcast());
acc.push( acc.push(
InactiveCode { InactiveCode { node: ast.with_value(item), cfg: cfg.clone(), opts: opts.clone() }
node: ast.with_value(SyntaxNodePtr::new(&item).into()), .into(),
cfg: cfg.clone(),
opts: opts.clone(),
}
.into(),
); );
} }
DefDiagnosticKind::UnresolvedProcMacro { ast, krate } => { DefDiagnosticKind::UnresolvedProcMacro { ast, krate } => {
@ -1273,7 +1269,7 @@ impl Adt {
.fill(|x| { .fill(|x| {
let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)); let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
match x { match x {
ParamKind::Type => GenericArgData::Ty(r).intern(Interner), ParamKind::Type => r.cast(Interner),
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
} }
}) })
@ -2096,14 +2092,6 @@ impl SelfParam {
.unwrap_or(Access::Owned) .unwrap_or(Access::Owned)
} }
pub fn display(self, db: &dyn HirDatabase) -> &'static str {
match self.access(db) {
Access::Shared => "&self",
Access::Exclusive => "&mut self",
Access::Owned => "self",
}
}
pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::SelfParam>> { pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::SelfParam>> {
let InFile { file_id, value } = Function::from(self.func).source(db)?; let InFile { file_id, value } = Function::from(self.func).source(db)?;
value value
@ -3142,12 +3130,8 @@ impl TypeParam {
} }
pub fn default(self, db: &dyn HirDatabase) -> Option<Type> { pub fn default(self, db: &dyn HirDatabase) -> Option<Type> {
let params = db.generic_defaults(self.id.parent()); let ty = generic_arg_from_param(db, self.id.into())?;
let local_idx = hir_ty::param_idx(db, self.id.into())?;
let resolver = self.id.parent().resolver(db.upcast()); let resolver = self.id.parent().resolver(db.upcast());
let ty = params.get(local_idx)?.clone();
let subst = TyBuilder::placeholder_subst(db, self.id.parent());
let ty = ty.substitute(Interner, &subst);
match ty.data(Interner) { match ty.data(Interner) {
GenericArgData::Ty(it) => { GenericArgData::Ty(it) => {
Some(Type::new_with_resolver_inner(db, &resolver, it.clone())) Some(Type::new_with_resolver_inner(db, &resolver, it.clone()))
@ -3209,6 +3193,19 @@ impl ConstParam {
pub fn ty(self, db: &dyn HirDatabase) -> Type { pub fn ty(self, db: &dyn HirDatabase) -> Type {
Type::new(db, self.id.parent(), db.const_param_ty(self.id)) Type::new(db, self.id.parent(), db.const_param_ty(self.id))
} }
pub fn default(self, db: &dyn HirDatabase) -> Option<ast::ConstArg> {
let arg = generic_arg_from_param(db, self.id.into())?;
known_const_to_ast(arg.constant(Interner)?, db)
}
}
fn generic_arg_from_param(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<GenericArg> {
let params = db.generic_defaults(id.parent);
let local_idx = hir_ty::param_idx(db, id)?;
let ty = params.get(local_idx)?.clone();
let subst = TyBuilder::placeholder_subst(db, id.parent);
Some(ty.substitute(Interner, &subst))
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@ -3716,7 +3713,7 @@ impl Type {
.fill(|x| { .fill(|x| {
let r = it.next().unwrap(); let r = it.next().unwrap();
match x { match x {
ParamKind::Type => GenericArgData::Ty(r).intern(Interner), ParamKind::Type => r.cast(Interner),
ParamKind::Const(ty) => { ParamKind::Const(ty) => {
// FIXME: this code is not covered in tests. // FIXME: this code is not covered in tests.
unknown_const_as_generic(ty.clone()) unknown_const_as_generic(ty.clone())
@ -3749,9 +3746,7 @@ impl Type {
.fill(|it| { .fill(|it| {
// FIXME: this code is not covered in tests. // FIXME: this code is not covered in tests.
match it { match it {
ParamKind::Type => { ParamKind::Type => args.next().unwrap().ty.clone().cast(Interner),
GenericArgData::Ty(args.next().unwrap().ty.clone()).intern(Interner)
}
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
} }
}) })
@ -4414,14 +4409,13 @@ impl Callable {
Other => CallableKind::Other, Other => CallableKind::Other,
} }
} }
pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<(ast::SelfParam, Type)> { pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<(SelfParam, Type)> {
let func = match self.callee { let func = match self.callee {
Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it, Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
_ => return None, _ => return None,
}; };
let src = func.lookup(db.upcast()).source(db.upcast()); let func = Function { id: func };
let param_list = src.value.param_list()?; Some((func.self_param(db)?, self.ty.derived(self.sig.params()[0].clone())))
Some((param_list.self_param()?, self.ty.derived(self.sig.params()[0].clone())))
} }
pub fn n_params(&self) -> usize { pub fn n_params(&self) -> usize {
self.sig.params().len() - if self.is_bound_method { 1 } else { 0 } self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }

View file

@ -170,6 +170,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.is_derive_annotated(item) self.imp.is_derive_annotated(item)
} }
/// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
/// expansion. `token_to_map` should be a token from the `speculative args` node.
pub fn speculative_expand( pub fn speculative_expand(
&self, &self,
actual_macro_call: &ast::MacroCall, actual_macro_call: &ast::MacroCall,
@ -179,6 +181,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map) self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
} }
/// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
/// expansion. `token_to_map` should be a token from the `speculative args` node.
pub fn speculative_expand_attr_macro( pub fn speculative_expand_attr_macro(
&self, &self,
actual_macro_call: &ast::Item, actual_macro_call: &ast::Item,
@ -201,14 +205,22 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
) )
} }
/// Descend the token into macrocalls to its first mapped counterpart. /// Descend the token into its macro call if it is part of one, returning the token in the
pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken { /// expansion that it is associated with. If `offset` points into the token's range, it will
self.imp.descend_into_macros_single(token) /// be considered for the mapping in case of inline format args.
pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
self.imp.descend_into_macros_single(token, offset)
} }
/// Descend the token into macrocalls to all its mapped counterparts. /// Descend the token into its macro call if it is part of one, returning the tokens in the
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { /// expansion that it is associated with. If `offset` points into the token's range, it will
self.imp.descend_into_macros(token) /// be considered for the mapping in case of inline format args.
pub fn descend_into_macros(
&self,
token: SyntaxToken,
offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
self.imp.descend_into_macros(token, offset)
} }
/// Descend the token into macro calls to all its mapped counterparts that have the same text as the input token. /// Descend the token into macro calls to all its mapped counterparts that have the same text as the input token.
@ -217,12 +229,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn descend_into_macros_with_same_text( pub fn descend_into_macros_with_same_text(
&self, &self,
token: SyntaxToken, token: SyntaxToken,
offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> { ) -> SmallVec<[SyntaxToken; 1]> {
self.imp.descend_into_macros_with_same_text(token) self.imp.descend_into_macros_with_same_text(token, offset)
} }
pub fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken { pub fn descend_into_macros_with_kind_preference(
self.imp.descend_into_macros_with_kind_preference(token) &self,
token: SyntaxToken,
offset: TextSize,
) -> SyntaxToken {
self.imp.descend_into_macros_with_kind_preference(token, offset)
} }
/// Maps a node down by mapping its first and last token down. /// Maps a node down by mapping its first and last token down.
@ -606,7 +623,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
resolver resolver
.resolve_path_as_macro(self.db.upcast(), &path, Some(MacroSubNs::Bang)) .resolve_path_as_macro(self.db.upcast(), &path, Some(MacroSubNs::Bang))
.map(|it| macro_id_to_def_id(self.db.upcast(), it)) .map(|(it, _)| macro_id_to_def_id(self.db.upcast(), it))
})?; })?;
hir_expand::db::expand_speculative( hir_expand::db::expand_speculative(
self.db.upcast(), self.db.upcast(),
@ -665,7 +682,7 @@ impl<'db> SemanticsImpl<'db> {
}; };
if first == last { if first == last {
self.descend_into_macros_impl(first, &mut |InFile { value, .. }| { self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| {
if let Some(node) = value.parent_ancestors().find_map(N::cast) { if let Some(node) = value.parent_ancestors().find_map(N::cast) {
res.push(node) res.push(node)
} }
@ -674,7 +691,7 @@ impl<'db> SemanticsImpl<'db> {
} else { } else {
// Descend first and last token, then zip them to look for the node they belong to // Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![]; let mut scratch: SmallVec<[_; 1]> = smallvec![];
self.descend_into_macros_impl(first, &mut |token| { self.descend_into_macros_impl(first, 0.into(), &mut |token| {
scratch.push(token); scratch.push(token);
false false
}); });
@ -682,6 +699,7 @@ impl<'db> SemanticsImpl<'db> {
let mut scratch = scratch.into_iter(); let mut scratch = scratch.into_iter();
self.descend_into_macros_impl( self.descend_into_macros_impl(
last, last,
0.into(),
&mut |InFile { value: last, file_id: last_fid }| { &mut |InFile { value: last, file_id: last_fid }| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid { if first_fid == last_fid {
@ -705,19 +723,27 @@ impl<'db> SemanticsImpl<'db> {
res res
} }
fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { fn descend_into_macros(
&self,
token: SyntaxToken,
offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
let mut res = smallvec![]; let mut res = smallvec![];
self.descend_into_macros_impl(token, &mut |InFile { value, .. }| { self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
res.push(value); res.push(value);
false false
}); });
res res
} }
fn descend_into_macros_with_same_text(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { fn descend_into_macros_with_same_text(
&self,
token: SyntaxToken,
offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
let text = token.text(); let text = token.text();
let mut res = smallvec![]; let mut res = smallvec![];
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| { self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
if value.text() == text { if value.text() == text {
res.push(value); res.push(value);
} }
@ -729,7 +755,11 @@ impl<'db> SemanticsImpl<'db> {
res res
} }
fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken { fn descend_into_macros_with_kind_preference(
&self,
token: SyntaxToken,
offset: TextSize,
) -> SyntaxToken {
let fetch_kind = |token: &SyntaxToken| match token.parent() { let fetch_kind = |token: &SyntaxToken| match token.parent() {
Some(node) => match node.kind() { Some(node) => match node.kind() {
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => { kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
@ -741,7 +771,7 @@ impl<'db> SemanticsImpl<'db> {
}; };
let preferred_kind = fetch_kind(&token); let preferred_kind = fetch_kind(&token);
let mut res = None; let mut res = None;
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| { self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
if fetch_kind(&value) == preferred_kind { if fetch_kind(&value) == preferred_kind {
res = Some(value); res = Some(value);
true true
@ -755,9 +785,9 @@ impl<'db> SemanticsImpl<'db> {
res.unwrap_or(token) res.unwrap_or(token)
} }
fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken { fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
let mut res = token.clone(); let mut res = token.clone();
self.descend_into_macros_impl(token, &mut |InFile { value, .. }| { self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
res = value; res = value;
true true
}); });
@ -767,9 +797,13 @@ impl<'db> SemanticsImpl<'db> {
fn descend_into_macros_impl( fn descend_into_macros_impl(
&self, &self,
token: SyntaxToken, token: SyntaxToken,
// FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
// mapping, specifically for node downmapping
offset: TextSize,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool, f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
) { ) {
let _p = profile::span("descend_into_macros"); let _p = profile::span("descend_into_macros");
let relative_token_offset = token.text_range().start().checked_sub(offset);
let parent = match token.parent() { let parent = match token.parent() {
Some(it) => it, Some(it) => it,
None => return, None => return,
@ -796,7 +830,12 @@ impl<'db> SemanticsImpl<'db> {
self.cache(value, file_id); self.cache(value, file_id);
} }
let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?; let mapped_tokens = expansion_info.map_token_down(
self.db.upcast(),
item,
token,
relative_token_offset,
)?;
let len = stack.len(); let len = stack.len();
// requeue the tokens we got from mapping our current token down // requeue the tokens we got from mapping our current token down
@ -943,7 +982,7 @@ impl<'db> SemanticsImpl<'db> {
offset: TextSize, offset: TextSize,
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ { ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
node.token_at_offset(offset) node.token_at_offset(offset)
.map(move |token| self.descend_into_macros(token)) .map(move |token| self.descend_into_macros(token, offset))
.map(|descendants| { .map(|descendants| {
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it)) descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
}) })
@ -1683,6 +1722,14 @@ impl SemanticsScope<'_> {
|name, id| cb(name, id.into()), |name, id| cb(name, id.into()),
) )
} }
pub fn extern_crates(&self) -> impl Iterator<Item = (Name, Module)> + '_ {
self.resolver.extern_crates_in_scope().map(|(name, id)| (name, Module { id }))
}
pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
self.resolver.extern_crate_decls_in_scope(self.db.upcast())
}
} }
#[derive(Debug)] #[derive(Debug)]

View file

@ -487,7 +487,7 @@ impl SourceAnalyzer {
let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?; let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
self.resolver self.resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang)) .resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
.map(|it| it.into()) .map(|(it, _)| it.into())
} }
pub(crate) fn resolve_bind_pat_to_const( pub(crate) fn resolve_bind_pat_to_const(
@ -760,7 +760,7 @@ impl SourceAnalyzer {
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
self.resolver self.resolver
.resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang)) .resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang))
.map(|it| macro_id_to_def_id(db.upcast(), it)) .map(|(it, _)| macro_id_to_def_id(db.upcast(), it))
})?; })?;
Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64) Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
} }
@ -966,6 +966,7 @@ pub(crate) fn resolve_hir_path_as_attr_macro(
) -> Option<Macro> { ) -> Option<Macro> {
resolver resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Attr)) .resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Attr))
.map(|(it, _)| it)
.map(Into::into) .map(Into::into)
} }
@ -983,7 +984,7 @@ fn resolve_hir_path_(
res.map(|ty_ns| (ty_ns, path.segments().first())) res.map(|ty_ns| (ty_ns, path.segments().first()))
} }
None => { None => {
let (ty, remaining_idx) = resolver.resolve_path_in_type_ns(db.upcast(), path)?; let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?;
match remaining_idx { match remaining_idx {
Some(remaining_idx) => { Some(remaining_idx) => {
if remaining_idx + 1 == path.segments().len() { if remaining_idx + 1 == path.segments().len() {
@ -1067,7 +1068,7 @@ fn resolve_hir_path_(
let macros = || { let macros = || {
resolver resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, None) .resolve_path_as_macro(db.upcast(), path.mod_path()?, None)
.map(|def| PathResolution::Def(ModuleDef::Macro(def.into()))) .map(|(def, _)| PathResolution::Def(ModuleDef::Macro(def.into())))
}; };
if prefer_value_ns { values().or_else(types) } else { types().or_else(values) } if prefer_value_ns { values().or_else(types) } else { types().or_else(values) }

View file

@ -2,8 +2,10 @@
use base_db::FileRange; use base_db::FileRange;
use hir_def::{ use hir_def::{
src::HasSource, AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, item_scope::ItemInNs,
ModuleDefId, ModuleId, TraitId, src::{HasChildSource, HasSource},
AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId,
TraitId,
}; };
use hir_expand::{HirFileId, InFile}; use hir_expand::{HirFileId, InFile};
use hir_ty::db::HirDatabase; use hir_ty::db::HirDatabase;
@ -167,6 +169,40 @@ impl<'a> SymbolCollector<'a> {
self.collect_from_impl(impl_id); self.collect_from_impl(impl_id);
} }
// Record renamed imports.
// In case it imports multiple items under different namespaces we just pick one arbitrarily
// for now.
for id in scope.imports() {
let loc = id.import.lookup(self.db.upcast());
loc.id.item_tree(self.db.upcast());
let source = id.import.child_source(self.db.upcast());
let Some(use_tree_src) = source.value.get(id.idx) else { continue };
let Some(rename) = use_tree_src.rename() else { continue };
let Some(name) = rename.name() else { continue };
let res = scope.fully_resolve_import(self.db.upcast(), id);
res.iter_items().for_each(|(item, _)| {
let def = match item {
ItemInNs::Types(def) | ItemInNs::Values(def) => def,
ItemInNs::Macros(def) => ModuleDefId::from(def),
}
.into();
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
name_ptr: SyntaxNodePtr::new(name.syntax()),
};
self.symbols.push(FileSymbol {
name: name.text().into(),
def,
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
});
});
}
for const_id in scope.unnamed_consts() { for const_id in scope.unnamed_consts() {
self.collect_from_body(const_id); self.collect_from_body(const_id);
} }

View file

@ -422,7 +422,7 @@ impl<'x, 'y, T, V, U: Default> Trait<'x, 'y, T, V, U> for () {
check_assist( check_assist(
add_missing_default_members, add_missing_default_members,
r#" r#"
struct Bar<const: N: bool> { struct Bar<const N: usize> {
bar: [i32, N] bar: [i32, N]
} }
@ -439,7 +439,7 @@ impl<const X: usize, Y, Z> Foo<X, Z> for S<Y> {
$0 $0
}"#, }"#,
r#" r#"
struct Bar<const: N: bool> { struct Bar<const N: usize> {
bar: [i32, N] bar: [i32, N]
} }
@ -483,6 +483,107 @@ impl<X> Foo<42, {20 + 22}, X> for () {
) )
} }
#[test]
fn test_const_substitution_with_defaults() {
check_assist(
add_missing_default_members,
r#"
trait Foo<T, const N: usize = 42, const M: bool = false, const P: char = 'a'> {
fn get_n(&self) -> usize { N }
fn get_m(&self) -> bool { M }
fn get_p(&self) -> char { P }
fn get_array(&self, arg: &T) -> [bool; N] { [M; N] }
}
impl<X> Foo<X> for () {
$0
}"#,
r#"
trait Foo<T, const N: usize = 42, const M: bool = false, const P: char = 'a'> {
fn get_n(&self) -> usize { N }
fn get_m(&self) -> bool { M }
fn get_p(&self) -> char { P }
fn get_array(&self, arg: &T) -> [bool; N] { [M; N] }
}
impl<X> Foo<X> for () {
$0fn get_n(&self) -> usize { 42 }
fn get_m(&self) -> bool { false }
fn get_p(&self) -> char { 'a' }
fn get_array(&self, arg: &X) -> [bool; 42] { [false; 42] }
}"#,
);
}
#[test]
fn test_const_substitution_with_defaults_2() {
check_assist(
add_missing_impl_members,
r#"
mod m {
pub const LEN: usize = 42;
pub trait Foo<const M: usize = LEN, const N: usize = M, T = [bool; N]> {
fn get_t(&self) -> T;
}
}
impl m::Foo for () {
$0
}"#,
r#"
mod m {
pub const LEN: usize = 42;
pub trait Foo<const M: usize = LEN, const N: usize = M, T = [bool; N]> {
fn get_t(&self) -> T;
}
}
impl m::Foo for () {
fn get_t(&self) -> [bool; m::LEN] {
${0:todo!()}
}
}"#,
)
}
#[test]
fn test_const_substitution_with_defaults_3() {
check_assist(
add_missing_default_members,
r#"
mod m {
pub const VAL: usize = 0;
pub trait Foo<const N: usize = {40 + 2}, const M: usize = {VAL + 1}> {
fn get_n(&self) -> usize { N }
fn get_m(&self) -> usize { M }
}
}
impl m::Foo for () {
$0
}"#,
r#"
mod m {
pub const VAL: usize = 0;
pub trait Foo<const N: usize = {40 + 2}, const M: usize = {VAL + 1}> {
fn get_n(&self) -> usize { N }
fn get_m(&self) -> usize { M }
}
}
impl m::Foo for () {
$0fn get_n(&self) -> usize { {40 + 2} }
fn get_m(&self) -> usize { {m::VAL + 1} }
}"#,
)
}
#[test] #[test]
fn test_cursor_after_empty_impl_def() { fn test_cursor_after_empty_impl_def() {
check_assist( check_assist(

View file

@ -1,6 +1,10 @@
use std::collections::VecDeque; use std::collections::VecDeque;
use syntax::ast::{self, AstNode}; use syntax::{
ast::{self, AstNode, Expr::BinExpr},
ted::{self, Position},
SyntaxKind,
};
use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists}; use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists};
@ -23,121 +27,117 @@ use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKin
// } // }
// ``` // ```
pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let expr = ctx.find_node_at_offset::<ast::BinExpr>()?; let mut bin_expr = ctx.find_node_at_offset::<ast::BinExpr>()?;
let op = expr.op_kind()?; let op = bin_expr.op_kind()?;
let op_range = expr.op_token()?.text_range(); let op_range = bin_expr.op_token()?.text_range();
let opposite_op = match op { // Is the cursor on the expression's logical operator?
ast::BinaryOp::LogicOp(ast::LogicOp::And) => "||", if !op_range.contains_range(ctx.selection_trimmed()) {
ast::BinaryOp::LogicOp(ast::LogicOp::Or) => "&&",
_ => return None,
};
let cursor_in_range = op_range.contains_range(ctx.selection_trimmed());
if !cursor_in_range {
return None; return None;
} }
let mut expr = expr;
// Walk up the tree while we have the same binary operator // Walk up the tree while we have the same binary operator
while let Some(parent_expr) = expr.syntax().parent().and_then(ast::BinExpr::cast) { while let Some(parent_expr) = bin_expr.syntax().parent().and_then(ast::BinExpr::cast) {
match expr.op_kind() { match parent_expr.op_kind() {
Some(parent_op) if parent_op == op => { Some(parent_op) if parent_op == op => {
expr = parent_expr; bin_expr = parent_expr;
} }
_ => break, _ => break,
} }
} }
let mut expr_stack = vec![expr.clone()]; let op = bin_expr.op_kind()?;
let mut terms = Vec::new(); let inv_token = match op {
let mut op_ranges = Vec::new(); ast::BinaryOp::LogicOp(ast::LogicOp::And) => SyntaxKind::PIPE2,
ast::BinaryOp::LogicOp(ast::LogicOp::Or) => SyntaxKind::AMP2,
_ => return None,
};
// Find all the children with the same binary operator let demorganed = bin_expr.clone_subtree().clone_for_update();
while let Some(expr) = expr_stack.pop() {
let mut traverse_bin_expr_arm = |expr| { ted::replace(demorganed.op_token()?, ast::make::token(inv_token));
if let ast::Expr::BinExpr(bin_expr) = expr { let mut exprs = VecDeque::from(vec![
if let Some(expr_op) = bin_expr.op_kind() { (bin_expr.lhs()?, demorganed.lhs()?),
if expr_op == op { (bin_expr.rhs()?, demorganed.rhs()?),
expr_stack.push(bin_expr); ]);
} else {
terms.push(ast::Expr::BinExpr(bin_expr)); while let Some((expr, dm)) = exprs.pop_front() {
} if let BinExpr(bin_expr) = &expr {
if let BinExpr(cbin_expr) = &dm {
if op == bin_expr.op_kind()? {
ted::replace(cbin_expr.op_token()?, ast::make::token(inv_token));
exprs.push_back((bin_expr.lhs()?, cbin_expr.lhs()?));
exprs.push_back((bin_expr.rhs()?, cbin_expr.rhs()?));
} else { } else {
terms.push(ast::Expr::BinExpr(bin_expr)); let mut inv = invert_boolean_expression(expr);
if inv.needs_parens_in(dm.syntax().parent()?) {
inv = ast::make::expr_paren(inv).clone_for_update();
}
ted::replace(dm.syntax(), inv.syntax());
} }
} else { } else {
terms.push(expr); return None;
} }
}; } else {
let mut inv = invert_boolean_expression(dm.clone_subtree()).clone_for_update();
op_ranges.extend(expr.op_token().map(|t| t.text_range())); if inv.needs_parens_in(dm.syntax().parent()?) {
traverse_bin_expr_arm(expr.lhs()?); inv = ast::make::expr_paren(inv).clone_for_update();
traverse_bin_expr_arm(expr.rhs()?); }
ted::replace(dm.syntax(), inv.syntax());
}
} }
let dm_lhs = demorganed.lhs()?;
acc.add( acc.add(
AssistId("apply_demorgan", AssistKind::RefactorRewrite), AssistId("apply_demorgan", AssistKind::RefactorRewrite),
"Apply De Morgan's law", "Apply De Morgan's law",
op_range, op_range,
|edit| { |edit| {
terms.sort_by_key(|t| t.syntax().text_range().start()); let paren_expr = bin_expr.syntax().parent().and_then(ast::ParenExpr::cast);
let mut terms = VecDeque::from(terms);
let paren_expr = expr.syntax().parent().and_then(ast::ParenExpr::cast);
let neg_expr = paren_expr let neg_expr = paren_expr
.clone() .clone()
.and_then(|paren_expr| paren_expr.syntax().parent()) .and_then(|paren_expr| paren_expr.syntax().parent())
.and_then(ast::PrefixExpr::cast) .and_then(ast::PrefixExpr::cast)
.and_then(|prefix_expr| { .and_then(|prefix_expr| {
if prefix_expr.op_kind().unwrap() == ast::UnaryOp::Not { if prefix_expr.op_kind()? == ast::UnaryOp::Not {
Some(prefix_expr) Some(prefix_expr)
} else { } else {
None None
} }
}); });
for op_range in op_ranges {
edit.replace(op_range, opposite_op);
}
if let Some(paren_expr) = paren_expr { if let Some(paren_expr) = paren_expr {
for term in terms {
let range = term.syntax().text_range();
let not_term = invert_boolean_expression(term);
edit.replace(range, not_term.syntax().text());
}
if let Some(neg_expr) = neg_expr { if let Some(neg_expr) = neg_expr {
cov_mark::hit!(demorgan_double_negation); cov_mark::hit!(demorgan_double_negation);
edit.replace(neg_expr.op_token().unwrap().text_range(), ""); edit.replace_ast(ast::Expr::PrefixExpr(neg_expr), demorganed.into());
} else { } else {
cov_mark::hit!(demorgan_double_parens); cov_mark::hit!(demorgan_double_parens);
edit.replace(paren_expr.l_paren_token().unwrap().text_range(), "!("); ted::insert_all_raw(
Position::before(dm_lhs.syntax()),
vec![
syntax::NodeOrToken::Token(ast::make::token(SyntaxKind::BANG)),
syntax::NodeOrToken::Token(ast::make::token(SyntaxKind::L_PAREN)),
],
);
ted::append_child_raw(
demorganed.syntax(),
syntax::NodeOrToken::Token(ast::make::token(SyntaxKind::R_PAREN)),
);
edit.replace_ast(ast::Expr::ParenExpr(paren_expr), demorganed.into());
} }
} else { } else {
if let Some(lhs) = terms.pop_front() { ted::insert_all_raw(
let lhs_range = lhs.syntax().text_range(); Position::before(dm_lhs.syntax()),
let not_lhs = invert_boolean_expression(lhs); vec![
syntax::NodeOrToken::Token(ast::make::token(SyntaxKind::BANG)),
edit.replace(lhs_range, format!("!({not_lhs}")); syntax::NodeOrToken::Token(ast::make::token(SyntaxKind::L_PAREN)),
} ],
);
if let Some(rhs) = terms.pop_back() { ted::append_child_raw(demorganed.syntax(), ast::make::token(SyntaxKind::R_PAREN));
let rhs_range = rhs.syntax().text_range(); edit.replace_ast(bin_expr, demorganed);
let not_rhs = invert_boolean_expression(rhs);
edit.replace(rhs_range, format!("{not_rhs})"));
}
for term in terms {
let term_range = term.syntax().text_range();
let not_term = invert_boolean_expression(term);
edit.replace(term_range, not_term.to_string());
}
} }
}, },
) )
@ -145,9 +145,8 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
use super::*; use super::*;
use crate::tests::{check_assist, check_assist_not_applicable};
#[test] #[test]
fn demorgan_handles_leq() { fn demorgan_handles_leq() {
@ -213,7 +212,7 @@ fn f() { !(S <= S || S < S) }
#[test] #[test]
fn demorgan_doesnt_double_negation() { fn demorgan_doesnt_double_negation() {
cov_mark::check!(demorgan_double_negation); cov_mark::check!(demorgan_double_negation);
check_assist(apply_demorgan, "fn f() { !(x ||$0 x) }", "fn f() { (!x && !x) }") check_assist(apply_demorgan, "fn f() { !(x ||$0 x) }", "fn f() { !x && !x }")
} }
#[test] #[test]
@ -222,13 +221,38 @@ fn f() { !(S <= S || S < S) }
check_assist(apply_demorgan, "fn f() { (x ||$0 x) }", "fn f() { !(!x && !x) }") check_assist(apply_demorgan, "fn f() { (x ||$0 x) }", "fn f() { !(!x && !x) }")
} }
// https://github.com/rust-lang/rust-analyzer/issues/10963 // FIXME: This needs to go.
// // https://github.com/rust-lang/rust-analyzer/issues/10963
// #[test]
// fn demorgan_doesnt_hang() {
// check_assist(
// apply_demorgan,
// "fn f() { 1 || 3 &&$0 4 || 5 }",
// "fn f() { !(!1 || !3 || !4) || 5 }",
// )
// }
#[test] #[test]
fn demorgan_doesnt_hang() { fn demorgan_keep_pars_for_op_precedence() {
check_assist( check_assist(
apply_demorgan, apply_demorgan,
"fn f() { 1 || 3 &&$0 4 || 5 }", "fn main() {
"fn f() { !(!1 || !3 || !4) || 5 }", let _ = !(!a ||$0 !(b || c));
}
",
"fn main() {
let _ = a && (b || c);
}
",
);
}
#[test]
fn demorgan_removes_pars_in_eq_precedence() {
check_assist(
apply_demorgan,
"fn() { let x = a && !(!b |$0| !c); }",
"fn() { let x = a && b && c; }",
) )
} }
} }
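A standalone sketch, not part of this commit, of the boolean identities the rewritten assist applies; each assertion mirrors a parenthesization case checked by the tests above.

// De Morgan's laws as the assist applies them: flip the operator, negate each
// operand, and keep parentheses only where operator precedence requires them.
fn main() {
    let (a, b, c) = (true, false, true);

    // `!(a || b)`          ->  `!a && !b`
    assert_eq!(!(a || b), !a && !b);

    // `!(!a || !(b || c))` ->  `a && (b || c)`  (inner parens kept, `||` binds weaker than `&&`)
    assert_eq!(!(!a || !(b || c)), a && (b || c));

    // `a && !(!b || !c)`   ->  `a && b && c`    (equal precedence, parens dropped)
    assert_eq!(a && !(!b || !c), a && b && c);
}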

View file

@ -161,9 +161,9 @@ fn process_struct_name_reference(
let path_segment = name_ref.syntax().parent().and_then(ast::PathSegment::cast)?; let path_segment = name_ref.syntax().parent().and_then(ast::PathSegment::cast)?;
// A `PathSegment` always belongs to a `Path`, so there's at least one `Path` at this point. // A `PathSegment` always belongs to a `Path`, so there's at least one `Path` at this point.
let full_path = let full_path =
path_segment.syntax().parent()?.ancestors().map_while(ast::Path::cast).last().unwrap(); path_segment.syntax().parent()?.ancestors().map_while(ast::Path::cast).last()?;
if full_path.segment().unwrap().name_ref()? != *name_ref { if full_path.segment()?.name_ref()? != *name_ref {
// `name_ref` isn't the last segment of the path, so `full_path` doesn't point to the // `name_ref` isn't the last segment of the path, so `full_path` doesn't point to the
// struct we want to edit. // struct we want to edit.
return None; return None;
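Several hunks in this commit, like the one above, swap `.unwrap()` for `?` inside `Option`-returning helpers. A minimal sketch of the pattern with illustrative names, not rust-analyzer code:

// Inside a function returning `Option`, `?` propagates `None` to the caller
// instead of panicking on unexpected input.
fn last_even(values: &[i32]) -> Option<i32> {
    // Before: values.iter().copied().filter(|v| v % 2 == 0).last().unwrap()
    // After: bail out with `None` when no even value exists.
    let last = values.iter().copied().filter(|v| v % 2 == 0).last()?;
    Some(last)
}

fn main() {
    assert_eq!(last_even(&[1, 2, 3, 4]), Some(4));
    assert_eq!(last_even(&[1, 3]), None); // would have panicked with `unwrap`
}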

View file

@ -58,7 +58,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'
return None; return None;
} }
let bound_ident = pat.fields().next().unwrap(); let bound_ident = pat.fields().next()?;
if !ast::IdentPat::can_cast(bound_ident.syntax().kind()) { if !ast::IdentPat::can_cast(bound_ident.syntax().kind()) {
return None; return None;
} }
@ -108,6 +108,15 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'
then_block.syntax().last_child_or_token().filter(|t| t.kind() == T!['}'])?; then_block.syntax().last_child_or_token().filter(|t| t.kind() == T!['}'])?;
let then_block_items = then_block.dedent(IndentLevel(1)).clone_for_update();
let end_of_then = then_block_items.syntax().last_child_or_token()?;
let end_of_then = if end_of_then.prev_sibling_or_token().map(|n| n.kind()) == Some(WHITESPACE) {
end_of_then.prev_sibling_or_token()?
} else {
end_of_then
};
let target = if_expr.syntax().text_range(); let target = if_expr.syntax().text_range();
acc.add( acc.add(
AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite), AssistId("convert_to_guarded_return", AssistKind::RefactorRewrite),
@ -141,16 +150,6 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'
} }
}; };
let then_block_items = then_block.dedent(IndentLevel(1)).clone_for_update();
let end_of_then = then_block_items.syntax().last_child_or_token().unwrap();
let end_of_then =
if end_of_then.prev_sibling_or_token().map(|n| n.kind()) == Some(WHITESPACE) {
end_of_then.prev_sibling_or_token().unwrap()
} else {
end_of_then
};
let then_statements = replacement let then_statements = replacement
.children_with_tokens() .children_with_tokens()
.chain( .chain(
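For context, a hand-written before/after in the guarded-return style this assist targets; the exact text the assist emits may differ from this sketch.

// Before the assist: the interesting code is nested inside `if let`.
fn before(input: Option<&str>) {
    if let Some(text) = input {
        println!("len = {}", text.len());
        println!("text = {text}");
    }
}

// After "Convert to guarded return": bail out early, then continue unindented.
fn after(input: Option<&str>) {
    let Some(text) = input else { return };
    println!("len = {}", text.len());
    println!("text = {text}");
}

fn main() {
    before(Some("hello"));
    after(Some("hello"));
    after(None);
}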

View file

@ -48,7 +48,7 @@ pub(crate) fn extract_expressions_from_format_string(
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
let expanded_t = ast::String::cast( let expanded_t = ast::String::cast(
ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone()), ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone(), 0.into()),
)?; )?;
if !is_format_string(&expanded_t) { if !is_format_string(&expanded_t) {
return None; return None;

View file

@ -750,7 +750,7 @@ impl FunctionBody {
.descendants_with_tokens() .descendants_with_tokens()
.filter_map(SyntaxElement::into_token) .filter_map(SyntaxElement::into_token)
.filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self])) .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
.flat_map(|t| sema.descend_into_macros(t)) .flat_map(|t| sema.descend_into_macros(t, 0.into()))
.for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast))); .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
} }
} }
@ -810,7 +810,7 @@ impl FunctionBody {
(true, konst.body(), Some(sema.to_def(&konst)?.ty(sema.db))) (true, konst.body(), Some(sema.to_def(&konst)?.ty(sema.db)))
}, },
ast::ConstParam(cp) => { ast::ConstParam(cp) => {
(true, cp.default_val(), Some(sema.to_def(&cp)?.ty(sema.db))) (true, cp.default_val()?.expr(), Some(sema.to_def(&cp)?.ty(sema.db)))
}, },
ast::ConstBlockPat(cbp) => { ast::ConstBlockPat(cbp) => {
let expr = cbp.block_expr().map(ast::Expr::BlockExpr); let expr = cbp.block_expr().map(ast::Expr::BlockExpr);
@ -1385,31 +1385,30 @@ enum FlowHandler {
impl FlowHandler { impl FlowHandler {
fn from_ret_ty(fun: &Function, ret_ty: &FunType) -> FlowHandler { fn from_ret_ty(fun: &Function, ret_ty: &FunType) -> FlowHandler {
match &fun.control_flow.kind { if fun.contains_tail_expr {
None => FlowHandler::None, return FlowHandler::None;
Some(flow_kind) => { }
let action = flow_kind.clone(); let Some(action) = fun.control_flow.kind.clone() else {
if let FunType::Unit = ret_ty { return FlowHandler::None;
match flow_kind { };
FlowKind::Return(None)
| FlowKind::Break(_, None) if let FunType::Unit = ret_ty {
| FlowKind::Continue(_) => FlowHandler::If { action }, match action {
FlowKind::Return(_) | FlowKind::Break(_, _) => { FlowKind::Return(None) | FlowKind::Break(_, None) | FlowKind::Continue(_) => {
FlowHandler::IfOption { action } FlowHandler::If { action }
}
FlowKind::Try { kind } => FlowHandler::Try { kind: kind.clone() },
}
} else {
match flow_kind {
FlowKind::Return(None)
| FlowKind::Break(_, None)
| FlowKind::Continue(_) => FlowHandler::MatchOption { none: action },
FlowKind::Return(_) | FlowKind::Break(_, _) => {
FlowHandler::MatchResult { err: action }
}
FlowKind::Try { kind } => FlowHandler::Try { kind: kind.clone() },
}
} }
FlowKind::Return(_) | FlowKind::Break(_, _) => FlowHandler::IfOption { action },
FlowKind::Try { kind } => FlowHandler::Try { kind },
}
} else {
match action {
FlowKind::Return(None) | FlowKind::Break(_, None) | FlowKind::Continue(_) => {
FlowHandler::MatchOption { none: action }
}
FlowKind::Return(_) | FlowKind::Break(_, _) => {
FlowHandler::MatchResult { err: action }
}
FlowKind::Try { kind } => FlowHandler::Try { kind },
} }
} }
} }
@ -1654,11 +1653,7 @@ impl Function {
fn make_ret_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> Option<ast::RetType> { fn make_ret_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> Option<ast::RetType> {
let fun_ty = self.return_type(ctx); let fun_ty = self.return_type(ctx);
let handler = if self.contains_tail_expr { let handler = FlowHandler::from_ret_ty(self, &fun_ty);
FlowHandler::None
} else {
FlowHandler::from_ret_ty(self, &fun_ty)
};
let ret_ty = match &handler { let ret_ty = match &handler {
FlowHandler::None => { FlowHandler::None => {
if matches!(fun_ty, FunType::Unit) { if matches!(fun_ty, FunType::Unit) {
@ -1728,11 +1723,7 @@ fn make_body(
fun: &Function, fun: &Function,
) -> ast::BlockExpr { ) -> ast::BlockExpr {
let ret_ty = fun.return_type(ctx); let ret_ty = fun.return_type(ctx);
let handler = if fun.contains_tail_expr { let handler = FlowHandler::from_ret_ty(fun, &ret_ty);
FlowHandler::None
} else {
FlowHandler::from_ret_ty(fun, &ret_ty)
};
let block = match &fun.body { let block = match &fun.body {
FunctionBody::Expr(expr) => { FunctionBody::Expr(expr) => {
@ -4471,7 +4462,7 @@ async fn foo() -> Result<(), ()> {
"#, "#,
r#" r#"
async fn foo() -> Result<(), ()> { async fn foo() -> Result<(), ()> {
fun_name().await? fun_name().await
} }
async fn $0fun_name() -> Result<(), ()> { async fn $0fun_name() -> Result<(), ()> {
@ -4690,7 +4681,7 @@ fn $0fun_name() {
check_assist( check_assist(
extract_function, extract_function,
r#" r#"
//- minicore: result //- minicore: result, try
fn foo() -> Result<(), i64> { fn foo() -> Result<(), i64> {
$0Result::<i32, i64>::Ok(0)?; $0Result::<i32, i64>::Ok(0)?;
Ok(())$0 Ok(())$0
@ -4698,7 +4689,7 @@ fn foo() -> Result<(), i64> {
"#, "#,
r#" r#"
fn foo() -> Result<(), i64> { fn foo() -> Result<(), i64> {
fun_name()? fun_name()
} }
fn $0fun_name() -> Result<(), i64> { fn $0fun_name() -> Result<(), i64> {
@ -5753,6 +5744,34 @@ fn $0fun_name<T, V>(t: T, v: V) -> i32 where T: Into<i32> + Copy, V: Into<i32> {
); );
} }
#[test]
fn tail_expr_no_extra_control_flow() {
check_assist(
extract_function,
r#"
//- minicore: result
fn fallible() -> Result<(), ()> {
$0if true {
return Err(());
}
Ok(())$0
}
"#,
r#"
fn fallible() -> Result<(), ()> {
fun_name()
}
fn $0fun_name() -> Result<(), ()> {
if true {
return Err(());
}
Ok(())
}
"#,
);
}
#[test] #[test]
fn non_tail_expr_of_tail_expr_loop() { fn non_tail_expr_of_tail_expr_loop() {
check_assist( check_assist(
@ -5800,12 +5819,6 @@ fn $0fun_name() -> ControlFlow<()> {
extract_function, extract_function,
r#" r#"
//- minicore: option, try //- minicore: option, try
impl<T> core::ops::Try for Option<T> {
type Output = T;
type Residual = Option<!>;
}
impl<T> core::ops::FromResidual for Option<T> {}
fn f() -> Option<()> { fn f() -> Option<()> {
if true { if true {
let a = $0if true { let a = $0if true {
@ -5820,12 +5833,6 @@ fn f() -> Option<()> {
} }
"#, "#,
r#" r#"
impl<T> core::ops::Try for Option<T> {
type Output = T;
type Residual = Option<!>;
}
impl<T> core::ops::FromResidual for Option<T> {}
fn f() -> Option<()> { fn f() -> Option<()> {
if true { if true {
let a = fun_name()?;; let a = fun_name()?;;
@ -5852,12 +5859,6 @@ fn $0fun_name() -> Option<()> {
extract_function, extract_function,
r#" r#"
//- minicore: option, try //- minicore: option, try
impl<T> core::ops::Try for Option<T> {
type Output = T;
type Residual = Option<!>;
}
impl<T> core::ops::FromResidual for Option<T> {}
fn f() -> Option<()> { fn f() -> Option<()> {
if true { if true {
$0{ $0{
@ -5874,15 +5875,9 @@ fn f() -> Option<()> {
} }
"#, "#,
r#" r#"
impl<T> core::ops::Try for Option<T> {
type Output = T;
type Residual = Option<!>;
}
impl<T> core::ops::FromResidual for Option<T> {}
fn f() -> Option<()> { fn f() -> Option<()> {
if true { if true {
fun_name()? fun_name()
} else { } else {
None None
} }
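The `contains_tail_expr` short-circuit in `FlowHandler::from_ret_ty` above is what the new `tail_expr_no_extra_control_flow` test checks: when the extracted region is the tail expression, the call site no longer gets an extra `?` or `match` wrapper. Restated as compilable Rust matching that test's expected output:

// Hand-written counterpart of the expected extraction result: the caller simply
// returns the helper's value, with no additional control-flow handling.
fn fallible() -> Result<(), ()> {
    fun_name()
}

fn fun_name() -> Result<(), ()> {
    if true {
        return Err(());
    }
    Ok(())
}

fn main() {
    let _ = fallible();
}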

View file

@ -95,6 +95,9 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else { let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else {
continue; continue;
}; };
let field = make::ext::field_from_idents(["self", &field_name])?;
acc.add_group( acc.add_group(
&GroupLabel("Generate delegate methods…".to_owned()), &GroupLabel("Generate delegate methods…".to_owned()),
AssistId("generate_delegate_methods", AssistKind::Generate), AssistId("generate_delegate_methods", AssistKind::Generate),
@ -115,11 +118,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
Some(list) => convert_param_list_to_arg_list(list), Some(list) => convert_param_list_to_arg_list(list),
None => make::arg_list([]), None => make::arg_list([]),
}; };
let tail_expr = make::expr_method_call( let tail_expr = make::expr_method_call(field, make::name_ref(&name), arg_list);
make::ext::field_from_idents(["self", &field_name]).unwrap(), // This unwrap is ok because we have at least 1 arg in the list
make::name_ref(&name),
arg_list,
);
let ret_type = method_source.ret_type(); let ret_type = method_source.ret_type();
let is_async = method_source.async_token().is_some(); let is_async = method_source.async_token().is_some();
let is_const = method_source.const_token().is_some(); let is_const = method_source.const_token().is_some();
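The `field_from_idents(["self", &field_name])` expression built above becomes the receiver of the generated call. A standalone sketch of the delegate-method shape, using illustrative types rather than code from the commit:

struct Inner;

impl Inner {
    fn method(&self, value: usize) -> usize {
        value + 1
    }
}

struct Outer {
    inner: Inner,
}

impl Outer {
    // Shape of a generated delegate method: forward to the field, `self.inner.method(value)`.
    fn method(&self, value: usize) -> usize {
        self.inner.method(value)
    }
}

fn main() {
    let outer = Outer { inner: Inner };
    assert_eq!(outer.method(1), 2);
}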

View file

@ -27,13 +27,19 @@ pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
let cap = ctx.config.snippet_cap?; let cap = ctx.config.snippet_cap?;
let nominal = ctx.find_node_at_offset::<ast::Adt>()?; let nominal = ctx.find_node_at_offset::<ast::Adt>()?;
let target = nominal.syntax().text_range(); let target = nominal.syntax().text_range();
let derive_attr = nominal
.attrs()
.filter_map(|x| x.as_simple_call())
.filter(|(name, _arg)| name == "derive")
.map(|(_name, arg)| arg)
.next();
let delimiter = match &derive_attr {
None => None,
Some(tt) => Some(tt.right_delimiter_token()?),
};
acc.add(AssistId("generate_derive", AssistKind::Generate), "Add `#[derive]`", target, |edit| { acc.add(AssistId("generate_derive", AssistKind::Generate), "Add `#[derive]`", target, |edit| {
let derive_attr = nominal
.attrs()
.filter_map(|x| x.as_simple_call())
.filter(|(name, _arg)| name == "derive")
.map(|(_name, arg)| arg)
.next();
match derive_attr { match derive_attr {
None => { None => {
let derive = make::attr_outer(make::meta_token_tree( let derive = make::attr_outer(make::meta_token_tree(
@ -45,16 +51,23 @@ pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
let nominal = edit.make_mut(nominal); let nominal = edit.make_mut(nominal);
nominal.add_attr(derive.clone()); nominal.add_attr(derive.clone());
let delimiter = derive
.meta()
.expect("make::attr_outer was expected to have Meta")
.token_tree()
.expect("failed to get token tree out of Meta")
.r_paren_token()
.expect("make::attr_outer was expected to have a R_PAREN");
edit.add_tabstop_before_token(cap, delimiter);
}
Some(_) => {
// Just move the cursor.
edit.add_tabstop_before_token( edit.add_tabstop_before_token(
cap, cap,
derive.meta().unwrap().token_tree().unwrap().r_paren_token().unwrap(), delimiter.expect("Right delim token could not be found."),
); );
} }
Some(tt) => {
// Just move the cursor.
let tt = edit.make_mut(tt);
edit.add_tabstop_before_token(cap, tt.right_delimiter_token().unwrap());
}
}; };
}) })
} }
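The two branches above correspond to the two situations the assist handles. A rough illustration, with `$0` marking the snippet tabstop as in the tests; the concrete snippet text is an assumption, not captured output.

// Case 1: no `#[derive]` yet -- the assist inserts one and places the tabstop
// before the closing parenthesis:
//
//     struct Point { x: i32, y: i32 }     =>     #[derive($0)]
//                                                struct Point { x: i32, y: i32 }
//
// Case 2: a `#[derive]` already exists -- only the cursor moves, e.g.
// `#[derive(Clone)]` => `#[derive(Clone$0)]`.
#[derive(Clone, Debug)]
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    println!("{:?}", p.clone());
}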

View file

@ -39,14 +39,11 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let replacements = let replacements =
macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::<Vec<_>>(); macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::<Vec<_>>();
if replacements.is_empty() {
return None;
}
acc.add( acc.add(
AssistId("remove_dbg", AssistKind::Refactor), AssistId("remove_dbg", AssistKind::Refactor),
"Remove dbg!()", "Remove dbg!()",
replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range)).unwrap(), replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range))?,
|builder| { |builder| {
for (range, expr) in replacements { for (range, expr) in replacements {
if let Some(expr) = expr { if let Some(expr) = expr {
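A small standalone example, not from the commit, of the rewrite this assist performs: `dbg!(expr)` evaluates to `expr`, so dropping the wrapper only removes the stderr logging.

fn main() {
    let x = 2;
    let with_dbg = dbg!(x * 3) + 1; // before: prints the expression and its value to stderr
    let without = x * 3 + 1;        // after `remove_dbg`: the expression alone
    assert_eq!(with_dbg, without);
}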

View file

@ -67,7 +67,7 @@ pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>)
// This case maps to the situation where the * token is braced. // This case maps to the situation where the * token is braced.
// In this case, the parent use tree's path is the one we should use to resolve the glob. // In this case, the parent use tree's path is the one we should use to resolve the glob.
match u.syntax().ancestors().skip(1).find_map(ast::UseTree::cast) { match u.syntax().ancestors().skip(1).find_map(ast::UseTree::cast) {
Some(parent_u) if parent_u.path().is_some() => parent_u.path().unwrap(), Some(parent_u) if parent_u.path().is_some() => parent_u.path()?,
_ => return None, _ => return None,
} }
} else { } else {

View file

@ -20,6 +20,7 @@ pub(crate) mod r#type;
pub(crate) mod use_; pub(crate) mod use_;
pub(crate) mod vis; pub(crate) mod vis;
pub(crate) mod env_vars; pub(crate) mod env_vars;
pub(crate) mod extern_crate;
use std::iter; use std::iter;
@ -703,7 +704,9 @@ pub(super) fn complete_name_ref(
TypeLocation::TypeAscription(ascription) => { TypeLocation::TypeAscription(ascription) => {
r#type::complete_ascribed_type(acc, ctx, path_ctx, ascription); r#type::complete_ascribed_type(acc, ctx, path_ctx, ascription);
} }
TypeLocation::GenericArgList(_) TypeLocation::GenericArg { .. }
| TypeLocation::AssocConstEq
| TypeLocation::AssocTypeEq
| TypeLocation::TypeBound | TypeLocation::TypeBound
| TypeLocation::ImplTarget | TypeLocation::ImplTarget
| TypeLocation::ImplTrait | TypeLocation::ImplTrait
@ -737,6 +740,7 @@ pub(super) fn complete_name_ref(
} }
} }
} }
NameRefKind::ExternCrate => extern_crate::complete_extern_crate(acc, ctx),
NameRefKind::DotAccess(dot_access) => { NameRefKind::DotAccess(dot_access) => {
flyimport::import_on_the_fly_dot(acc, ctx, dot_access); flyimport::import_on_the_fly_dot(acc, ctx, dot_access);
dot::complete_dot(acc, ctx, dot_access); dot::complete_dot(acc, ctx, dot_access);

View file

@ -0,0 +1,71 @@
//! Completion for extern crates
use hir::{HasAttrs, Name};
use ide_db::SymbolKind;
use crate::{context::CompletionContext, CompletionItem, CompletionItemKind};
use super::Completions;
pub(crate) fn complete_extern_crate(acc: &mut Completions, ctx: &CompletionContext<'_>) {
let imported_extern_crates: Vec<Name> = ctx.scope.extern_crate_decls().collect();
for (name, module) in ctx.scope.extern_crates() {
if imported_extern_crates.contains(&name) {
continue;
}
let mut item = CompletionItem::new(
CompletionItemKind::SymbolKind(SymbolKind::Module),
ctx.source_range(),
name.to_smol_str(),
);
item.set_documentation(module.docs(ctx.db));
item.add_to(acc, ctx.db);
}
}
#[cfg(test)]
mod test {
use crate::tests::completion_list_no_kw;
#[test]
fn can_complete_extern_crate() {
let case = r#"
//- /lib.rs crate:other_crate_a
// nothing here
//- /other_crate_b.rs crate:other_crate_b
pub mod good_mod{}
//- /lib.rs crate:crate_c
// nothing here
//- /lib.rs crate:lib deps:other_crate_a,other_crate_b,crate_c extern-prelude:other_crate_a
extern crate oth$0
mod other_mod {}
"#;
let completion_list = completion_list_no_kw(case);
assert_eq!("md other_crate_a\n".to_string(), completion_list);
}
#[test]
fn will_not_complete_existing_import() {
let case = r#"
//- /lib.rs crate:other_crate_a
// nothing here
//- /lib.rs crate:crate_c
// nothing here
//- /lib.rs crate:other_crate_b
//
//- /lib.rs crate:lib deps:other_crate_a,other_crate_b,crate_c extern-prelude:other_crate_a,other_crate_b
extern crate other_crate_b;
extern crate oth$0
mod other_mod {}
"#;
let completion_list = completion_list_no_kw(case);
assert_eq!("md other_crate_a\n".to_string(), completion_list);
}
}

View file

@ -1,7 +1,7 @@
//! Completion of names from the current scope in type position. //! Completion of names from the current scope in type position.
use hir::{HirDisplay, ScopeDef}; use hir::{HirDisplay, ScopeDef};
use syntax::{ast, AstNode, SyntaxKind}; use syntax::{ast, AstNode};
use crate::{ use crate::{
context::{PathCompletionCtx, Qualified, TypeAscriptionTarget, TypeLocation}, context::{PathCompletionCtx, Qualified, TypeAscriptionTarget, TypeLocation},
@ -20,16 +20,15 @@ pub(crate) fn complete_type_path(
let scope_def_applicable = |def| { let scope_def_applicable = |def| {
use hir::{GenericParam::*, ModuleDef::*}; use hir::{GenericParam::*, ModuleDef::*};
match def { match def {
ScopeDef::GenericParam(LifetimeParam(_)) | ScopeDef::Label(_) => false, ScopeDef::GenericParam(LifetimeParam(_)) => location.complete_lifetimes(),
ScopeDef::Label(_) => false,
// no values in type places // no values in type places
ScopeDef::ModuleDef(Function(_) | Variant(_) | Static(_)) | ScopeDef::Local(_) => false, ScopeDef::ModuleDef(Function(_) | Variant(_) | Static(_)) | ScopeDef::Local(_) => false,
// unless it's a constant in a generic arg list position // unless it's a constant in a generic arg list position
ScopeDef::ModuleDef(Const(_)) | ScopeDef::GenericParam(ConstParam(_)) => { ScopeDef::ModuleDef(Const(_)) | ScopeDef::GenericParam(ConstParam(_)) => {
matches!(location, TypeLocation::GenericArgList(_)) location.complete_consts()
}
ScopeDef::ImplSelfType(_) => {
!matches!(location, TypeLocation::ImplTarget | TypeLocation::ImplTrait)
} }
ScopeDef::ImplSelfType(_) => location.complete_self_type(),
// Don't suggest attribute macros and derives. // Don't suggest attribute macros and derives.
ScopeDef::ModuleDef(Macro(mac)) => mac.is_fn_like(ctx.db), ScopeDef::ModuleDef(Macro(mac)) => mac.is_fn_like(ctx.db),
// Type things are fine // Type things are fine
@ -38,12 +37,12 @@ pub(crate) fn complete_type_path(
) )
| ScopeDef::AdtSelfType(_) | ScopeDef::AdtSelfType(_)
| ScopeDef::Unknown | ScopeDef::Unknown
| ScopeDef::GenericParam(TypeParam(_)) => true, | ScopeDef::GenericParam(TypeParam(_)) => location.complete_types(),
} }
}; };
let add_assoc_item = |acc: &mut Completions, item| match item { let add_assoc_item = |acc: &mut Completions, item| match item {
hir::AssocItem::Const(ct) if matches!(location, TypeLocation::GenericArgList(_)) => { hir::AssocItem::Const(ct) if matches!(location, TypeLocation::GenericArg { .. }) => {
acc.add_const(ctx, ct) acc.add_const(ctx, ct)
} }
hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => (), hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => (),
@ -157,56 +156,30 @@ pub(crate) fn complete_type_path(
}); });
return; return;
} }
TypeLocation::GenericArgList(Some(arg_list)) => { TypeLocation::GenericArg {
let in_assoc_type_arg = ctx args: Some(arg_list), of_trait: Some(trait_), ..
.original_token } => {
.parent_ancestors() if arg_list.syntax().ancestors().find_map(ast::TypeBound::cast).is_some() {
.any(|node| node.kind() == SyntaxKind::ASSOC_TYPE_ARG); let arg_idx = arg_list
.generic_args()
.filter(|arg| {
arg.syntax().text_range().end()
< ctx.original_token.text_range().start()
})
.count();
if !in_assoc_type_arg { let n_required_params = trait_.type_or_const_param_count(ctx.sema.db, true);
if let Some(path_seg) = if arg_idx >= n_required_params {
arg_list.syntax().parent().and_then(ast::PathSegment::cast) trait_.items_with_supertraits(ctx.sema.db).into_iter().for_each(|it| {
{ if let hir::AssocItem::TypeAlias(alias) = it {
if path_seg cov_mark::hit!(complete_assoc_type_in_generics_list);
.syntax() acc.add_type_alias_with_eq(ctx, alias);
.ancestors()
.find_map(ast::TypeBound::cast)
.is_some()
{
if let Some(hir::PathResolution::Def(hir::ModuleDef::Trait(
trait_,
))) = ctx.sema.resolve_path(&path_seg.parent_path())
{
let arg_idx = arg_list
.generic_args()
.filter(|arg| {
arg.syntax().text_range().end()
< ctx.original_token.text_range().start()
})
.count();
let n_required_params =
trait_.type_or_const_param_count(ctx.sema.db, true);
if arg_idx >= n_required_params {
trait_
.items_with_supertraits(ctx.sema.db)
.into_iter()
.for_each(|it| {
if let hir::AssocItem::TypeAlias(alias) = it {
cov_mark::hit!(
complete_assoc_type_in_generics_list
);
acc.add_type_alias_with_eq(ctx, alias);
}
});
let n_params =
trait_.type_or_const_param_count(ctx.sema.db, false);
if arg_idx >= n_params {
return; // only show assoc types
}
}
} }
});
let n_params = trait_.type_or_const_param_count(ctx.sema.db, false);
if arg_idx >= n_params {
return; // only show assoc types
} }
} }
} }

View file

@ -155,13 +155,63 @@ pub(crate) struct ExprCtx {
pub(crate) enum TypeLocation { pub(crate) enum TypeLocation {
TupleField, TupleField,
TypeAscription(TypeAscriptionTarget), TypeAscription(TypeAscriptionTarget),
GenericArgList(Option<ast::GenericArgList>), /// Generic argument position e.g. `Foo<$0>`
GenericArg {
/// The generic argument list containing the generic arg
args: Option<ast::GenericArgList>,
/// `Some(trait_)` if `trait_` is being instantiated with `args`
of_trait: Option<hir::Trait>,
/// The generic parameter being filled in by the generic arg
corresponding_param: Option<ast::GenericParam>,
},
/// Associated type equality constraint e.g. `Foo<Bar = $0>`
AssocTypeEq,
/// Associated constant equality constraint e.g. `Foo<X = $0>`
AssocConstEq,
TypeBound, TypeBound,
ImplTarget, ImplTarget,
ImplTrait, ImplTrait,
Other, Other,
} }
impl TypeLocation {
pub(crate) fn complete_lifetimes(&self) -> bool {
matches!(
self,
TypeLocation::GenericArg {
corresponding_param: Some(ast::GenericParam::LifetimeParam(_)),
..
}
)
}
pub(crate) fn complete_consts(&self) -> bool {
match self {
TypeLocation::GenericArg {
corresponding_param: Some(ast::GenericParam::ConstParam(_)),
..
} => true,
TypeLocation::AssocConstEq => true,
_ => false,
}
}
pub(crate) fn complete_types(&self) -> bool {
match self {
TypeLocation::GenericArg { corresponding_param: Some(param), .. } => {
matches!(param, ast::GenericParam::TypeParam(_))
}
TypeLocation::AssocConstEq => false,
TypeLocation::AssocTypeEq => true,
_ => true,
}
}
pub(crate) fn complete_self_type(&self) -> bool {
self.complete_types() && !matches!(self, TypeLocation::ImplTarget | TypeLocation::ImplTrait)
}
}
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum TypeAscriptionTarget { pub(crate) enum TypeAscriptionTarget {
Let(Option<ast::Pat>), Let(Option<ast::Pat>),
@ -301,6 +351,7 @@ pub(super) enum NameRefKind {
expr: ast::RecordExpr, expr: ast::RecordExpr,
}, },
Pattern(PatternContext), Pattern(PatternContext),
ExternCrate,
} }
/// The identifier we are currently completing. /// The identifier we are currently completing.
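A compilable cheat sheet, not part of the commit, for the cursor positions the new variants describe; `$0` marks the completion point, as in the test fixtures.

#![allow(dead_code)]

// Declarations that make the positions below concrete.
trait Bar {
    type Assoc;
    const C: usize;
}

struct Foo<T, const N: usize>(T);

// TypeLocation::GenericArg   ->  `Foo<$0, 0>`            (filling in `T`)
// TypeLocation::GenericArg   ->  `Foo<u8, $0>`           (filling in `const N`)
// TypeLocation::AssocTypeEq  ->  `impl Bar<Assoc = $0>`
// TypeLocation::AssocConstEq ->  `impl Bar<C = $0>`
fn main() {
    let _: Foo<u8, 0> = Foo(1);
}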

View file

@ -1,11 +1,11 @@
//! Module responsible for analyzing the code surrounding the cursor for completion. //! Module responsible for analyzing the code surrounding the cursor for completion.
use std::iter; use std::iter;
use hir::{Semantics, Type, TypeInfo, Variant}; use hir::{HasSource, Semantics, Type, TypeInfo, Variant};
use ide_db::{active_parameter::ActiveParameter, RootDatabase}; use ide_db::{active_parameter::ActiveParameter, RootDatabase};
use syntax::{ use syntax::{
algo::{find_node_at_offset, non_trivia_sibling}, algo::{find_node_at_offset, non_trivia_sibling},
ast::{self, AttrKind, HasArgList, HasLoopBody, HasName, NameOrNameRef}, ast::{self, AttrKind, HasArgList, HasGenericParams, HasLoopBody, HasName, NameOrNameRef},
match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
SyntaxToken, TextRange, TextSize, T, SyntaxToken, TextRange, TextSize, T,
}; };
@ -624,6 +624,10 @@ fn classify_name_ref(
}); });
return Some(make_res(kind)); return Some(make_res(kind));
}, },
ast::ExternCrate(_) => {
let kind = NameRefKind::ExternCrate;
return Some(make_res(kind));
},
ast::MethodCallExpr(method) => { ast::MethodCallExpr(method) => {
let receiver = find_opt_node_in_file(original_file, method.receiver()); let receiver = find_opt_node_in_file(original_file, method.receiver());
let kind = NameRefKind::DotAccess(DotAccess { let kind = NameRefKind::DotAccess(DotAccess {
@ -719,6 +723,136 @@ fn classify_name_ref(
None None
}; };
let generic_arg_location = |arg: ast::GenericArg| {
let mut override_location = None;
let location = find_opt_node_in_file_compensated(
sema,
original_file,
arg.syntax().parent().and_then(ast::GenericArgList::cast),
)
.map(|args| {
let mut in_trait = None;
let param = (|| {
let parent = args.syntax().parent()?;
let params = match_ast! {
match parent {
ast::PathSegment(segment) => {
match sema.resolve_path(&segment.parent_path().top_path())? {
hir::PathResolution::Def(def) => match def {
hir::ModuleDef::Function(func) => {
func.source(sema.db)?.value.generic_param_list()
}
hir::ModuleDef::Adt(adt) => {
adt.source(sema.db)?.value.generic_param_list()
}
hir::ModuleDef::Variant(variant) => {
variant.parent_enum(sema.db).source(sema.db)?.value.generic_param_list()
}
hir::ModuleDef::Trait(trait_) => {
if let ast::GenericArg::AssocTypeArg(arg) = &arg {
let arg_name = arg.name_ref()?;
let arg_name = arg_name.text();
for item in trait_.items_with_supertraits(sema.db) {
match item {
hir::AssocItem::TypeAlias(assoc_ty) => {
if assoc_ty.name(sema.db).as_str()? == arg_name {
override_location = Some(TypeLocation::AssocTypeEq);
return None;
}
},
hir::AssocItem::Const(const_) => {
if const_.name(sema.db)?.as_str()? == arg_name {
override_location = Some(TypeLocation::AssocConstEq);
return None;
}
},
_ => (),
}
}
return None;
} else {
in_trait = Some(trait_);
trait_.source(sema.db)?.value.generic_param_list()
}
}
hir::ModuleDef::TraitAlias(trait_) => {
trait_.source(sema.db)?.value.generic_param_list()
}
hir::ModuleDef::TypeAlias(ty_) => {
ty_.source(sema.db)?.value.generic_param_list()
}
_ => None,
},
_ => None,
}
},
ast::MethodCallExpr(call) => {
let func = sema.resolve_method_call(&call)?;
func.source(sema.db)?.value.generic_param_list()
},
ast::AssocTypeArg(arg) => {
let trait_ = ast::PathSegment::cast(arg.syntax().parent()?.parent()?)?;
match sema.resolve_path(&trait_.parent_path().top_path())? {
hir::PathResolution::Def(def) => match def {
hir::ModuleDef::Trait(trait_) => {
let arg_name = arg.name_ref()?;
let arg_name = arg_name.text();
let trait_items = trait_.items_with_supertraits(sema.db);
let assoc_ty = trait_items.iter().find_map(|item| match item {
hir::AssocItem::TypeAlias(assoc_ty) => {
(assoc_ty.name(sema.db).as_str()? == arg_name)
.then_some(assoc_ty)
},
_ => None,
})?;
assoc_ty.source(sema.db)?.value.generic_param_list()
}
_ => None,
},
_ => None,
}
},
_ => None,
}
}?;
// Determine the index of the argument in the `GenericArgList` and match it with
// the corresponding parameter in the `GenericParamList`. Since lifetime parameters
// are often omitted, ignore them for the purposes of matching the argument with
// its parameter unless a lifetime argument is provided explicitly. That is, for
// `struct S<'a, 'b, T>`, match `S::<$0>` to `T` and `S::<'a, $0, _>` to `'b`.
// FIXME: This operates on the syntax tree and will produce incorrect results when
// generic parameters are disabled by `#[cfg]` directives. It should operate on the
// HIR, but the functionality necessary to do so is not exposed at the moment.
let mut explicit_lifetime_arg = false;
let arg_idx = arg
.syntax()
.siblings(Direction::Prev)
// Skip the node itself
.skip(1)
.map(|arg| if ast::LifetimeArg::can_cast(arg.kind()) { explicit_lifetime_arg = true })
.count();
let param_idx = if explicit_lifetime_arg {
arg_idx
} else {
// Lifetimes parameters always precede type and generic parameters,
// so offset the argument index by the total number of lifetime params
arg_idx + params.lifetime_params().count()
};
params.generic_params().nth(param_idx)
})();
(args, in_trait, param)
});
let (arg_list, of_trait, corresponding_param) = match location {
Some((arg_list, of_trait, param)) => (Some(arg_list), of_trait, param),
_ => (None, None, None),
};
override_location.unwrap_or(TypeLocation::GenericArg {
args: arg_list,
of_trait,
corresponding_param,
})
};
let type_location = |node: &SyntaxNode| { let type_location = |node: &SyntaxNode| {
let parent = node.parent()?; let parent = node.parent()?;
let res = match_ast! { let res = match_ast! {
@ -774,9 +908,12 @@ fn classify_name_ref(
ast::TypeBound(_) => TypeLocation::TypeBound, ast::TypeBound(_) => TypeLocation::TypeBound,
// is this case needed? // is this case needed?
ast::TypeBoundList(_) => TypeLocation::TypeBound, ast::TypeBoundList(_) => TypeLocation::TypeBound,
ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))), ast::GenericArg(it) => generic_arg_location(it),
// is this case needed? // is this case needed?
ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))), ast::GenericArgList(it) => {
let args = find_opt_node_in_file_compensated(sema, original_file, Some(it));
TypeLocation::GenericArg { args, of_trait: None, corresponding_param: None }
},
ast::TupleField(_) => TypeLocation::TupleField, ast::TupleField(_) => TypeLocation::TupleField,
_ => return None, _ => return None,
} }
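The lifetime-offset rule described in the comment above, worked through by hand for the `S` fixture used in the new tests later in this diff:

// For `struct S<'a, 'b, const C: usize, T>`, the argument index is offset by the
// two lifetime parameters unless a lifetime argument is written explicitly:
//
//   S::<F$0, _>                    arg 0, no explicit lifetime -> param 0 + 2 = `const C` (consts offered)
//   S::<'static, F$0, _, _>        explicit lifetime present   -> param 1     = `'b`      (lifetimes offered)
//   S::<'static, 'static, F$0, _>  explicit lifetime present   -> param 2     = `const C` (consts offered)
struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);

fn main() {
    let _: S<'static, 'static, 0, u8> = S(core::marker::PhantomData);
}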

View file

@ -1286,3 +1286,57 @@ macro_rules! println {
expect![""], expect![""],
) )
} }
#[test]
fn no_completions_for_external_doc_hidden_in_path() {
check(
r#"
//- /main.rs crate:main deps:dep
fn main() {
Span$0
}
//- /lib.rs crate:dep
#[doc(hidden)]
pub mod bridge {
pub mod server {
pub trait Span
}
}
pub mod bridge2 {
#[doc(hidden)]
pub mod server2 {
pub trait Span
}
}
"#,
expect![""],
);
// unless re-exported
check(
r#"
//- /main.rs crate:main deps:dep
fn main() {
Span$0
}
//- /lib.rs crate:dep
#[doc(hidden)]
pub mod bridge {
pub mod server {
pub trait Span
}
}
pub use bridge::server::Span;
pub mod bridge2 {
#[doc(hidden)]
pub mod server2 {
pub trait Span2
}
}
pub use bridge2::server2::Span2;
"#,
expect![[r#"
tt Span (use dep::Span)
tt Span2 (use dep::Span2)
"#]],
);
}

View file

@ -384,10 +384,8 @@ trait Trait2<T>: Trait1 {
fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {} fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {}
"#, "#,
expect![[r#" expect![[r#"
ct CONST
cp CONST_PARAM
en Enum en Enum
ma makro!() macro_rules! makro ma makro!() macro_rules! makro
md module md module
st Record st Record
st Tuple st Tuple
@ -404,14 +402,13 @@ fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {}
); );
check( check(
r#" r#"
trait Trait2 { trait Trait2<T> {
type Foo; type Foo;
} }
fn foo<'lt, T: Trait2<self::$0>, const CONST_PARAM: usize>(_: T) {} fn foo<'lt, T: Trait2<self::$0>, const CONST_PARAM: usize>(_: T) {}
"#, "#,
expect![[r#" expect![[r#"
ct CONST
en Enum en Enum
ma makro!() macro_rules! makro ma makro!() macro_rules! makro
md module md module
@ -437,7 +434,6 @@ trait Tr<T> {
impl Tr<$0 impl Tr<$0
"#, "#,
expect![[r#" expect![[r#"
ct CONST
en Enum en Enum
ma makro!() macro_rules! makro ma makro!() macro_rules! makro
md module md module
@ -485,7 +481,6 @@ trait MyTrait<T, U> {
fn f(t: impl MyTrait<u$0 fn f(t: impl MyTrait<u$0
"#, "#,
expect![[r#" expect![[r#"
ct CONST
en Enum en Enum
ma makro!() macro_rules! makro ma makro!() macro_rules! makro
md module md module
@ -511,7 +506,6 @@ trait MyTrait<T, U> {
fn f(t: impl MyTrait<u8, u$0 fn f(t: impl MyTrait<u8, u$0
"#, "#,
expect![[r#" expect![[r#"
ct CONST
en Enum en Enum
ma makro!() macro_rules! makro ma makro!() macro_rules! makro
md module md module
@ -555,7 +549,6 @@ trait MyTrait<T, U = u8> {
fn f(t: impl MyTrait<u$0 fn f(t: impl MyTrait<u$0
"#, "#,
expect![[r#" expect![[r#"
ct CONST
en Enum en Enum
ma makro!() macro_rules! makro ma makro!() macro_rules! makro
md module md module
@ -581,7 +574,6 @@ trait MyTrait<T, U = u8> {
fn f(t: impl MyTrait<u8, u$0 fn f(t: impl MyTrait<u8, u$0
"#, "#,
expect![[r#" expect![[r#"
ct CONST
en Enum en Enum
ma makro!() macro_rules! makro ma makro!() macro_rules! makro
md module md module
@ -627,7 +619,6 @@ trait MyTrait {
fn f(t: impl MyTrait<Item1 = $0 fn f(t: impl MyTrait<Item1 = $0
"#, "#,
expect![[r#" expect![[r#"
ct CONST
en Enum en Enum
ma makro!() macro_rules! makro ma makro!() macro_rules! makro
md module md module
@ -653,7 +644,6 @@ trait MyTrait {
fn f(t: impl MyTrait<Item1 = u8, Item2 = $0 fn f(t: impl MyTrait<Item1 = u8, Item2 = $0
"#, "#,
expect![[r#" expect![[r#"
ct CONST
en Enum en Enum
ma makro!() macro_rules! makro ma makro!() macro_rules! makro
md module md module
@ -668,6 +658,22 @@ fn f(t: impl MyTrait<Item1 = u8, Item2 = $0
kw self:: kw self::
"#]], "#]],
); );
check(
r#"
trait MyTrait {
const C: usize;
};
fn f(t: impl MyTrait<C = $0
"#,
expect![[r#"
ct CONST
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
} }
#[test] #[test]
@ -719,3 +725,267 @@ pub struct S;
"#]], "#]],
) )
} }
#[test]
fn completes_const_and_type_generics_separately() {
// Function generic params
check(
r#"
struct Foo;
const X: usize = 0;
fn foo<T, const N: usize>() {}
fn main() {
foo::<F$0, _>();
}
"#,
expect![[r#"
en Enum
ma makro!() macro_rules! makro
md module
st Foo
st Record
st Tuple
st Unit
tt Trait
un Union
bt u32
kw crate::
kw self::
"#]],
);
// FIXME: This should probably also suggest completions for types, at least those that have
// associated constants usable in this position. For example, a user could be typing
// `foo::<_, { usize::MAX }>()`, but we currently don't suggest `usize` in constant position.
check(
r#"
struct Foo;
const X: usize = 0;
fn foo<T, const N: usize>() {}
fn main() {
foo::<_, $0>();
}
"#,
expect![[r#"
ct CONST
ct X
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
// Method generic params
check(
r#"
const X: usize = 0;
struct Foo;
impl Foo { fn bar<const N: usize, T>(self) {} }
fn main() {
Foo.bar::<_, $0>();
}
"#,
expect![[r#"
en Enum
ma makro!() macro_rules! makro
md module
st Foo
st Record
st Tuple
st Unit
tt Trait
un Union
bt u32
kw crate::
kw self::
"#]],
);
check(
r#"
const X: usize = 0;
struct Foo;
impl Foo { fn bar<const N: usize, T>(self) {} }
fn main() {
Foo.bar::<X$0, _>();
}
"#,
expect![[r#"
ct CONST
ct X
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
// Associated type generic params
check(
r#"
const X: usize = 0;
struct Foo;
trait Bar {
type Baz<T, const X: usize>;
}
fn foo(_: impl Bar<Baz<F$0, 0> = ()>) {}
"#,
expect![[r#"
en Enum
ma makro!() macro_rules! makro
md module
st Foo
st Record
st Tuple
st Unit
tt Bar
tt Trait
un Union
bt u32
kw crate::
kw self::
"#]],
);
check(
r#"
const X: usize = 0;
struct Foo;
trait Bar {
type Baz<T, const X: usize>;
}
fn foo<T: Bar<Baz<(), $0> = ()>>() {}
"#,
expect![[r#"
ct CONST
ct X
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
// Type generic params
check(
r#"
const X: usize = 0;
struct Foo<T, const N: usize>(T);
fn main() {
let _: Foo::<_, $0> = Foo(());
}
"#,
expect![[r#"
ct CONST
ct X
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
// Type alias generic params
check(
r#"
const X: usize = 0;
struct Foo<T, const N: usize>(T);
type Bar<const X: usize, U> = Foo<U, X>;
fn main() {
let _: Bar::<X$0, _> = Bar(());
}
"#,
expect![[r#"
ct CONST
ct X
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
// Enum variant params
check(
r#"
const X: usize = 0;
enum Foo<T, const N: usize> { A(T), B }
fn main() {
Foo::B::<(), $0>;
}
"#,
expect![[r#"
ct CONST
ct X
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
// Trait params
check(
r#"
const X: usize = 0;
trait Foo<T, const N: usize> {}
impl Foo<(), $0> for () {}
"#,
expect![[r#"
ct CONST
ct X
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
// Trait alias params
check(
r#"
#![feature(trait_alias)]
const X: usize = 0;
trait Foo<T, const N: usize> {}
trait Bar<const M: usize, U> = Foo<U, M>;
fn foo<T: Bar<X$0, ()>>() {}
"#,
expect![[r#"
ct CONST
ct X
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
// Omitted lifetime params
check(
r#"
struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);
fn foo<'a>() { S::<F$0, _>; }
"#,
expect![[r#"
ct CONST
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
// Explicit lifetime params
check(
r#"
struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);
fn foo<'a>() { S::<'static, 'static, F$0, _>; }
"#,
expect![[r#"
ct CONST
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
check(
r#"
struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);
fn foo<'a>() { S::<'static, F$0, _, _>; }
"#,
expect![[r#"
lt 'a
ma makro!() macro_rules! makro
kw crate::
kw self::
"#]],
);
}

View file

@ -7,7 +7,7 @@
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use hir::{ use hir::{
Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, DocLinkDef,
ExternCrateDecl, Field, Function, GenericParam, HasVisibility, Impl, Label, Local, Macro, ExternCrateDecl, Field, Function, GenericParam, HasVisibility, Impl, Label, Local, Macro,
Module, ModuleDef, Name, PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias, Module, ModuleDef, Name, PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias,
TypeAlias, Variant, Visibility, TypeAlias, Variant, Visibility,
@ -649,3 +649,13 @@ impl From<ModuleDef> for Definition {
} }
} }
} }
impl From<DocLinkDef> for Definition {
fn from(def: DocLinkDef) -> Self {
match def {
DocLinkDef::ModuleDef(it) => it.into(),
DocLinkDef::Field(it) => it.into(),
DocLinkDef::SelfType(it) => it.into(),
}
}
}

View file

@ -117,7 +117,7 @@ pub fn get_definition(
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken, token: SyntaxToken,
) -> Option<Definition> { ) -> Option<Definition> {
for token in sema.descend_into_macros(token) { for token in sema.descend_into_macros(token, 0.into()) {
let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
if let Some(&[x]) = def.as_deref() { if let Some(&[x]) = def.as_deref() {
return Some(x); return Some(x);

View file

@ -6,7 +6,7 @@ use hir::{
use itertools::Itertools; use itertools::Itertools;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use syntax::{ use syntax::{
ast::{self, HasName}, ast::{self, make, HasName},
utils::path_to_string_stripping_turbo_fish, utils::path_to_string_stripping_turbo_fish,
AstNode, SyntaxNode, AstNode, SyntaxNode,
}; };
@ -607,7 +607,7 @@ impl ImportCandidate {
fn for_name(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<Self> { fn for_name(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<Self> {
if sema if sema
.scope(name.syntax())? .scope(name.syntax())?
.speculative_resolve(&ast::make::ext::ident_path(&name.text())) .speculative_resolve(&make::ext::ident_path(&name.text()))
.is_some() .is_some()
{ {
return None; return None;

View file

@ -94,18 +94,21 @@ impl fmt::Debug for RootDatabase {
} }
impl Upcast<dyn ExpandDatabase> for RootDatabase { impl Upcast<dyn ExpandDatabase> for RootDatabase {
#[inline]
fn upcast(&self) -> &(dyn ExpandDatabase + 'static) { fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
&*self &*self
} }
} }
impl Upcast<dyn DefDatabase> for RootDatabase { impl Upcast<dyn DefDatabase> for RootDatabase {
#[inline]
fn upcast(&self) -> &(dyn DefDatabase + 'static) { fn upcast(&self) -> &(dyn DefDatabase + 'static) {
&*self &*self
} }
} }
impl Upcast<dyn HirDatabase> for RootDatabase { impl Upcast<dyn HirDatabase> for RootDatabase {
#[inline]
fn upcast(&self) -> &(dyn HirDatabase + 'static) { fn upcast(&self) -> &(dyn HirDatabase + 'static) {
&*self &*self
} }
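The change here only adds `#[inline]` hints to the trivial conversions. For context, a self-contained sketch of the Upcast pattern these impls follow, using simplified stand-in types rather than the real ide-db traits:

// A cheap, inlinable conversion from a concrete database type to one of its
// trait-object "views".
trait ExpandDatabase {
    fn expand(&self) -> String;
}

trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

struct RootDatabase;

impl ExpandDatabase for RootDatabase {
    fn expand(&self) -> String {
        "expanded".to_owned()
    }
}

impl Upcast<dyn ExpandDatabase> for RootDatabase {
    #[inline]
    fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
        self // unsized coercion to the trait object
    }
}

fn main() {
    let db = RootDatabase;
    let view: &dyn ExpandDatabase = db.upcast();
    assert_eq!(view.expand(), "expanded");
}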

View file

@ -5,7 +5,7 @@ use either::Either;
use hir::{AsAssocItem, HirDisplay, SemanticsScope}; use hir::{AsAssocItem, HirDisplay, SemanticsScope};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use syntax::{ use syntax::{
ast::{self, AstNode}, ast::{self, make, AstNode},
ted, SyntaxNode, ted, SyntaxNode,
}; };
@ -21,6 +21,7 @@ enum TypeOrConst {
} }
type LifetimeName = String; type LifetimeName = String;
type DefaultedParam = Either<hir::TypeParam, hir::ConstParam>;
/// `PathTransform` substitutes path in SyntaxNodes in bulk. /// `PathTransform` substitutes path in SyntaxNodes in bulk.
/// ///
@ -115,7 +116,7 @@ impl<'a> PathTransform<'a> {
}; };
let mut type_substs: FxHashMap<hir::TypeParam, ast::Type> = Default::default(); let mut type_substs: FxHashMap<hir::TypeParam, ast::Type> = Default::default();
let mut const_substs: FxHashMap<hir::ConstParam, SyntaxNode> = Default::default(); let mut const_substs: FxHashMap<hir::ConstParam, SyntaxNode> = Default::default();
let mut default_types: Vec<hir::TypeParam> = Default::default(); let mut defaulted_params: Vec<DefaultedParam> = Default::default();
self.generic_def self.generic_def
.into_iter() .into_iter()
.flat_map(|it| it.type_params(db)) .flat_map(|it| it.type_params(db))
@ -138,8 +139,8 @@ impl<'a> PathTransform<'a> {
if let Some(default) = if let Some(default) =
&default.display_source_code(db, source_module.into(), false).ok() &default.display_source_code(db, source_module.into(), false).ok()
{ {
type_substs.insert(k, ast::make::ty(default).clone_for_update()); type_substs.insert(k, make::ty(default).clone_for_update());
default_types.push(k); defaulted_params.push(Either::Left(k));
} }
} }
} }
@ -155,11 +156,19 @@ impl<'a> PathTransform<'a> {
// is a standalone statement or a part of another expression) // and sometimes require slight modifications; see
// and sometimes require slight modifications; see // and sometimes require slight modifications; see
// https://doc.rust-lang.org/reference/statements.html#expression-statements // https://doc.rust-lang.org/reference/statements.html#expression-statements
// (default values in curly brackets can cause the same problem)
const_substs.insert(k, expr.syntax().clone()); const_substs.insert(k, expr.syntax().clone());
} }
} }
(Either::Left(_), None) => (), // FIXME: get default const value (Either::Left(k), None) => {
_ => (), // ignore mismatching params if let Some(default) = k.default(db) {
if let Some(default) = default.expr() {
const_substs.insert(k, default.syntax().clone_for_update());
defaulted_params.push(Either::Right(k));
}
}
}
_ => (), // ignore mismatching params
}); });
let lifetime_substs: FxHashMap<_, _> = self let lifetime_substs: FxHashMap<_, _> = self
.generic_def .generic_def
@ -175,7 +184,7 @@ impl<'a> PathTransform<'a> {
target_module, target_module,
source_scope: self.source_scope, source_scope: self.source_scope,
}; };
ctx.transform_default_type_substs(default_types); ctx.transform_default_values(defaulted_params);
ctx ctx
} }
} }
@ -212,13 +221,19 @@ impl Ctx<'_> {
}); });
} }
fn transform_default_type_substs(&self, default_types: Vec<hir::TypeParam>) { fn transform_default_values(&self, defaulted_params: Vec<DefaultedParam>) {
for k in default_types { // By now the default values are simply copied from where they are declared
let v = self.type_substs.get(&k).unwrap(); // and should be transformed. As any value is allowed to refer to previous
// generic (both type and const) parameters, they should be all iterated left-to-right.
for param in defaulted_params {
let value = match param {
Either::Left(k) => self.type_substs.get(&k).unwrap().syntax(),
Either::Right(k) => self.const_substs.get(&k).unwrap(),
};
// `transform_path` may update a node's parent and that would break the // `transform_path` may update a node's parent and that would break the
// tree traversal. Thus all paths in the tree are collected into a vec // tree traversal. Thus all paths in the tree are collected into a vec
// so that such operation is safe. // so that such operation is safe.
let paths = postorder(&v.syntax()).filter_map(ast::Path::cast).collect::<Vec<_>>(); let paths = postorder(value).filter_map(ast::Path::cast).collect::<Vec<_>>();
for path in paths { for path in paths {
self.transform_path(path); self.transform_path(path);
} }
@ -263,15 +278,14 @@ impl Ctx<'_> {
hir::ModuleDef::Trait(trait_ref), hir::ModuleDef::Trait(trait_ref),
false, false,
)?; )?;
match ast::make::ty_path(mod_path_to_ast(&found_path)) { match make::ty_path(mod_path_to_ast(&found_path)) {
ast::Type::PathType(path_ty) => Some(path_ty), ast::Type::PathType(path_ty) => Some(path_ty),
_ => None, _ => None,
} }
}); });
let segment = ast::make::path_segment_ty(subst.clone(), trait_ref); let segment = make::path_segment_ty(subst.clone(), trait_ref);
let qualified = let qualified = make::path_from_segments(std::iter::once(segment), false);
ast::make::path_from_segments(std::iter::once(segment), false);
ted::replace(path.syntax(), qualified.clone_for_update().syntax()); ted::replace(path.syntax(), qualified.clone_for_update().syntax());
} else if let Some(path_ty) = ast::PathType::cast(parent) { } else if let Some(path_ty) = ast::PathType::cast(parent) {
ted::replace( ted::replace(
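The rename to `transform_default_values` above reflects that const-parameter defaults are now substituted too, and that copied defaults must be re-qualified for the target scope. A simplified, compilable version of what the `test_const_substitution_with_defaults_2` fixture earlier in this diff exercises (type default only, so it builds on stable Rust):

// Defaults are copied from the declaration site and may refer to items that need
// re-qualification at the use site: `LEN` becomes `m::LEN` when the impl lives
// outside `m`, which is why the copied values are rewritten before insertion.
mod m {
    pub const LEN: usize = 42;

    pub trait Foo<T = [bool; LEN]> {
        fn get_t(&self) -> T;
    }
}

impl m::Foo for () {
    // The substituted default, with `LEN` rewritten to `m::LEN`.
    fn get_t(&self) -> [bool; m::LEN] {
        [false; m::LEN]
    }
}

fn main() {
    use m::Foo;
    assert_eq!(().get_t().len(), 42);
}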

View file

@ -456,14 +456,14 @@ impl<'a> FindUsages<'a> {
it.text().trim_start_matches("r#") == name it.text().trim_start_matches("r#") == name
}) })
.into_iter() .into_iter()
.flat_map(|token| { .flat_map(move |token| {
// FIXME: There should be optimization potential here // FIXME: There should be optimization potential here
// Currently we try to descend everything we find which // Currently we try to descend everything we find which
// means we call `Semantics::descend_into_macros` on // means we call `Semantics::descend_into_macros` on
// every textual hit. That function is notoriously // every textual hit. That function is notoriously
// expensive even for things that do not get down mapped // expensive even for things that do not get down mapped
// into macros. // into macros.
sema.descend_into_macros(token).into_iter().filter_map(|it| it.parent()) sema.descend_into_macros(token, offset).into_iter().filter_map(|it| it.parent())
}) })
}; };

View file

@ -323,6 +323,8 @@ impl Query {
hir::ModuleDef::Adt(..) hir::ModuleDef::Adt(..)
| hir::ModuleDef::TypeAlias(..) | hir::ModuleDef::TypeAlias(..)
| hir::ModuleDef::BuiltinType(..) | hir::ModuleDef::BuiltinType(..)
| hir::ModuleDef::TraitAlias(..)
| hir::ModuleDef::Trait(..)
) )
{ {
continue; continue;
@ -417,9 +419,16 @@ const CONST_WITH_INNER: () = {
mod b_mod; mod b_mod;
use define_struct as really_define_struct;
use Macro as ItemLikeMacro;
use Macro as Trait; // overlay namespaces
//- /b_mod.rs //- /b_mod.rs
struct StructInModB; struct StructInModB;
"#, use super::Macro as SuperItemLikeMacro;
use crate::b_mod::StructInModB as ThisStruct;
use crate::Trait as IsThisJustATrait;
"#,
); );
let symbols: Vec<_> = Crate::from(db.test_crate()) let symbols: Vec<_> = Crate::from(db.test_crate())

View file

@ -118,6 +118,35 @@
container_name: None, container_name: None,
is_alias: false, is_alias: false,
}, },
FileSymbol {
name: "ItemLikeMacro",
def: Macro(
Macro {
id: Macro2Id(
Macro2Id(
0,
),
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 654..676,
},
name_ptr: SyntaxNodePtr {
kind: NAME,
range: 663..676,
},
},
container_name: None,
is_alias: false,
},
FileSymbol { FileSymbol {
name: "Macro", name: "Macro",
def: Macro( def: Macro(
@ -352,6 +381,35 @@
container_name: None, container_name: None,
is_alias: false, is_alias: false,
}, },
FileSymbol {
name: "Trait",
def: Macro(
Macro {
id: Macro2Id(
Macro2Id(
0,
),
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 682..696,
},
name_ptr: SyntaxNodePtr {
kind: NAME,
range: 691..696,
},
},
container_name: None,
is_alias: false,
},
FileSymbol { FileSymbol {
name: "Union", name: "Union",
def: Adt( def: Adt(
@ -551,6 +609,35 @@
container_name: None, container_name: None,
is_alias: false, is_alias: false,
}, },
FileSymbol {
name: "really_define_struct",
def: Macro(
Macro {
id: MacroRulesId(
MacroRulesId(
1,
),
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 611..648,
},
name_ptr: SyntaxNodePtr {
kind: NAME,
range: 628..648,
},
},
container_name: None,
is_alias: false,
},
FileSymbol {
name: "trait_fn",
def: Function(
@@ -631,6 +718,35 @@
},
},
[
FileSymbol {
name: "IsThisJustATrait",
def: Macro(
Macro {
id: Macro2Id(
Macro2Id(
0,
),
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
1,
),
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 111..143,
},
name_ptr: SyntaxNodePtr {
kind: NAME,
range: 127..143,
},
},
container_name: None,
is_alias: false,
},
FileSymbol {
name: "StructInModB",
def: Adt(
@@ -660,6 +776,93 @@
container_name: None,
is_alias: false,
},
FileSymbol {
name: "SuperItemLikeMacro",
def: Macro(
Macro {
id: Macro2Id(
Macro2Id(
0,
),
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
1,
),
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 25..59,
},
name_ptr: SyntaxNodePtr {
kind: NAME,
range: 41..59,
},
},
container_name: None,
is_alias: false,
},
FileSymbol {
name: "ThisStruct",
def: Adt(
Struct(
Struct {
id: StructId(
3,
),
},
),
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
1,
),
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 65..105,
},
name_ptr: SyntaxNodePtr {
kind: NAME,
range: 95..105,
},
},
container_name: None,
is_alias: false,
},
FileSymbol {
name: "ThisStruct",
def: Adt(
Struct(
Struct {
id: StructId(
3,
),
},
),
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
1,
),
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 65..105,
},
name_ptr: SyntaxNodePtr {
kind: NAME,
range: 95..105,
},
},
container_name: None,
is_alias: false,
},
],
),
]


@@ -1,31 +1,29 @@
//! Functionality for generating trivial constructors

use hir::StructKind;
-use syntax::ast;
+use syntax::ast::{make, Expr, Path};

/// given a type return the trivial constructor (if one exists)
pub fn use_trivial_constructor(
    db: &crate::RootDatabase,
-    path: ast::Path,
+    path: Path,
    ty: &hir::Type,
-) -> Option<ast::Expr> {
+) -> Option<Expr> {
    match ty.as_adt() {
        Some(hir::Adt::Enum(x)) => {
            if let &[variant] = &*x.variants(db) {
                if variant.kind(db) == hir::StructKind::Unit {
-                    let path = ast::make::path_qualified(
+                    let path = make::path_qualified(
                        path,
-                        syntax::ast::make::path_segment(ast::make::name_ref(
-                            &variant.name(db).to_smol_str(),
-                        )),
+                        make::path_segment(make::name_ref(&variant.name(db).to_smol_str())),
                    );
-                    return Some(syntax::ast::make::expr_path(path));
+                    return Some(make::expr_path(path));
                }
            }
        }
        Some(hir::Adt::Struct(x)) if x.kind(db) == StructKind::Unit => {
-            return Some(syntax::ast::make::expr_path(path));
+            return Some(make::expr_path(path));
        }
        _ => {}
    }


@@ -560,8 +560,10 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
                placeholder_value.autoref_kind = self
                    .sema
                    .resolve_method_call_as_callable(code)
-                    .and_then(|callable| callable.receiver_param(self.sema.db))
-                    .map(|(self_param, _)| self_param.kind())
+                    .and_then(|callable| {
+                        let (self_param, _) = callable.receiver_param(self.sema.db)?;
+                        Some(self_param.source(self.sema.db)?.value.kind())
+                    })
                    .unwrap_or(ast::SelfParamKind::Owned);
            }
        }
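The updated hunk reads the `ast::SelfParamKind` through the HIR self-param's source (`self_param.source(db)?.value.kind()`) instead of getting it directly from the callable. The `?`-inside-`and_then` shape it relies on is ordinary `Option` chaining; a self-contained sketch of the same pattern, using stand-in types rather than the rust-analyzer ones:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum SelfParamKind { Owned, Ref, MutRef }

struct SelfParam { kind: Option<SelfParamKind> }
struct Callable { receiver: Option<(SelfParam, ())> }

// Mirrors the diff's shape: an `and_then` closure that uses `?` internally and
// returns an Option, followed by `unwrap_or` for the default.
fn autoref_kind(callable: Option<Callable>) -> SelfParamKind {
    callable
        .and_then(|callable| {
            let (self_param, _) = callable.receiver?;
            self_param.kind // in the real code: self_param.source(db)?.value.kind()
        })
        .unwrap_or(SelfParamKind::Owned)
}

fn main() {
    let c = Callable { receiver: Some((SelfParam { kind: Some(SelfParamKind::Ref) }, ())) };
    assert_eq!(autoref_kind(Some(c)), SelfParamKind::Ref);
    assert_eq!(autoref_kind(None), SelfParamKind::Owned);
}
```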


@@ -74,18 +74,20 @@ pub(crate) fn incoming_calls(
    Some(calls.into_items())
}

-pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
+pub(crate) fn outgoing_calls(
+    db: &RootDatabase,
+    FilePosition { file_id, offset }: FilePosition,
+) -> Option<Vec<CallItem>> {
    let sema = Semantics::new(db);
-    let file_id = position.file_id;
    let file = sema.parse(file_id);
    let file = file.syntax();
-    let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+    let token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
        IDENT => 1,
        _ => 0,
    })?;
    let mut calls = CallLocations::default();
-    sema.descend_into_macros(token)
+    sema.descend_into_macros(token, offset)
        .into_iter()
        .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
        .filter_map(|item| match item {


@@ -131,19 +131,19 @@ pub(crate) fn remove_links(markdown: &str) -> String {
// |===
pub(crate) fn external_docs(
    db: &RootDatabase,
-    position: &FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
    target_dir: Option<&OsStr>,
    sysroot: Option<&OsStr>,
) -> Option<DocumentationLinks> {
    let sema = &Semantics::new(db);
-    let file = sema.parse(position.file_id).syntax().clone();
-    let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+    let file = sema.parse(file_id).syntax().clone();
+    let token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
        IDENT | INT_NUMBER | T![self] => 3,
        T!['('] | T![')'] => 2,
        kind if kind.is_trivia() => 0,
        _ => 1,
    })?;
-    let token = sema.descend_into_macros_single(token);
+    let token = sema.descend_into_macros_single(token, offset);
    let node = token.parent()?;
    let definition = match_ast! {
@@ -285,7 +285,7 @@ impl DocCommentToken {
        let original_start = doc_token.text_range().start();
        let relative_comment_offset = offset - original_start - prefix_len;
-        sema.descend_into_macros(doc_token).into_iter().find_map(|t| {
+        sema.descend_into_macros(doc_token, offset).into_iter().find_map(|t| {
            let (node, descended_prefix_len) = match_ast! {
                match t {
                    ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),


@@ -517,6 +517,62 @@ fn function();
)
}
#[test]
fn doc_links_field() {
check_doc_links(
r#"
/// [`S::f`]
/// [`S2::f`]
/// [`T::0`]
/// [`U::a`]
/// [`E::A::f`]
/// [`E::B::0`]
struct S$0 {
f: i32,
//^ S::f
//^ S2::f
}
type S2 = S;
struct T(i32);
//^^^ T::0
union U {
a: i32,
//^ U::a
}
enum E {
A { f: i32 },
//^ E::A::f
B(i32),
//^^^ E::B::0
}
"#,
);
}
#[test]
fn doc_links_field_via_self() {
check_doc_links(
r#"
/// [`Self::f`]
struct S$0 {
f: i32,
//^ Self::f
}
"#,
);
}
#[test]
fn doc_links_tuple_field_via_self() {
check_doc_links(
r#"
/// [`Self::0`]
struct S$0(i32);
//^^^ Self::0
"#,
);
}
#[test]
fn rewrite_html_root_url() {
    check_rewrite(


@@ -40,28 +40,33 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
    // struct Bar;
    // ```
-    let derive = sema.descend_into_macros(tok.clone()).into_iter().find_map(|descended| {
+    let derive =
+        sema.descend_into_macros(tok.clone(), 0.into()).into_iter().find_map(|descended| {
            let hir_file = sema.hir_file_for(&descended.parent()?);
            if !hir_file.is_derive_attr_pseudo_expansion(db) {
                return None;
            }
            let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
            // up map out of the #[derive] expansion
            let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
            let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
            let expansions = sema.expand_derive_macro(&attr)?;
            let idx = attr
                .token_tree()?
                .token_trees_and_tokens()
                .filter_map(NodeOrToken::into_token)
                .take_while(|it| it != &token)
                .filter(|it| it.kind() == T![,])
                .count();
-            let expansion =
-                format(db, SyntaxKind::MACRO_ITEMS, position.file_id, expansions.get(idx).cloned()?);
+            let expansion = format(
+                db,
+                SyntaxKind::MACRO_ITEMS,
+                position.file_id,
+                expansions.get(idx).cloned()?,
+            );
            Some(ExpandedMacro { name, expansion })
        });
    if derive.is_some() {
        return derive;


@@ -17,8 +17,6 @@ use crate::FileRange;
// Extends or shrinks the current selection to the encompassing syntactic construct
// (expression, statement, item, module, etc). It works with multiple cursors.
//
-// This is a standard LSP feature and not a protocol extension.
-//
// |===
// | Editor | Shortcut
//
@@ -142,8 +140,10 @@ fn extend_tokens_from_range(
    // compute original mapped token range
    let extended = {
-        let fst_expanded = sema.descend_into_macros_single(first_token.clone());
-        let lst_expanded = sema.descend_into_macros_single(last_token.clone());
+        let fst_expanded =
+            sema.descend_into_macros_single(first_token.clone(), original_range.start());
+        let lst_expanded =
+            sema.descend_into_macros_single(last_token.clone(), original_range.end());
        let mut lca =
            algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
        lca = shallowest_node(&lca);
@@ -154,13 +154,16 @@ fn extend_tokens_from_range(
    };
    // Compute parent node range
-    let validate = |token: &SyntaxToken| -> bool {
-        let expanded = sema.descend_into_macros_single(token.clone());
-        let parent = match expanded.parent() {
-            Some(it) => it,
-            None => return false,
-        };
-        algo::least_common_ancestor(&extended, &parent).as_ref() == Some(&extended)
+    let validate = |offset: TextSize| {
+        let extended = &extended;
+        move |token: &SyntaxToken| -> bool {
+            let expanded = sema.descend_into_macros_single(token.clone(), offset);
+            let parent = match expanded.parent() {
+                Some(it) => it,
+                None => return false,
+            };
+            algo::least_common_ancestor(extended, &parent).as_ref() == Some(extended)
+        }
    };
    // Find the first and last text range under expanded parent
@@ -168,14 +171,14 @@ fn extend_tokens_from_range(
            let token = token.prev_token()?;
            skip_trivia_token(token, Direction::Prev)
        })
-        .take_while(validate)
+        .take_while(validate(original_range.start()))
        .last()?;
    let last = successors(Some(last_token), |token| {
            let token = token.next_token()?;
            skip_trivia_token(token, Direction::Next)
        })
-        .take_while(validate)
+        .take_while(validate(original_range.end()))
        .last()?;
    let range = first.text_range().cover(last.text_range());
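The rewritten `validate` is a closure factory: calling it with an offset returns a `move` predicate, so the two `take_while` calls can validate against the start and the end of the original range respectively. A minimal standalone sketch of that pattern (the names and types here are made up, not the syntax-tree ones from the hunk):

```rust
// A function that returns a `move` closure: the outer call fixes one parameter
// (`limit`), and the returned closure is what the iterator adaptor consumes.
fn below(limit: i32) -> impl Fn(&i32) -> bool {
    move |&x| x < limit
}

fn main() {
    let xs = [1, 2, 3, 4, 5];
    // Each take_while gets its own predicate, built from a different limit,
    // just as the diff builds one validator per offset.
    let first: Vec<_> = xs.iter().copied().take_while(below(3)).collect();
    let second: Vec<_> = xs.iter().copied().take_while(below(5)).collect();
    assert_eq!(first, [1, 2]);
    assert_eq!(second, [1, 2, 3, 4]);
}
```

The hunk itself uses an inline closure capturing `&extended` rather than a named `impl Fn`-returning function, but the shape is the same.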


@@ -20,16 +20,16 @@ use crate::{
// - fields in patterns will navigate to the field declaration of the struct, union or variant
pub(crate) fn goto_declaration(
    db: &RootDatabase,
-    position: FilePosition,
+    position @ FilePosition { file_id, offset }: FilePosition,
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
    let sema = Semantics::new(db);
-    let file = sema.parse(position.file_id).syntax().clone();
+    let file = sema.parse(file_id).syntax().clone();
    let original_token = file
-        .token_at_offset(position.offset)
+        .token_at_offset(offset)
        .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
    let range = original_token.text_range();
    let info: Vec<NavigationTarget> = sema
-        .descend_into_macros(original_token)
+        .descend_into_macros(original_token, offset)
        .iter()
        .filter_map(|token| {
            let parent = token.parent()?;
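The new parameter `position @ FilePosition { file_id, offset }` binds the whole value and its fields at once, presumably so the body can use `file_id`/`offset` directly while still handing the full `position` to helpers. A tiny standalone illustration of an `@` binding in parameter position (the struct here is a stand-in, not the real `FilePosition`):

```rust
#[derive(Clone, Copy, Debug)]
struct FilePosition { file_id: u32, offset: u32 }

fn log(pos: FilePosition) {
    println!("{pos:?}");
}

// `whole @ Pattern { .. }` keeps a binding for the full value *and*
// destructures its fields in one go (the type must be Copy, or the fields
// must be bound by reference, for both bindings to coexist).
fn handle(pos @ FilePosition { file_id, offset }: FilePosition) {
    println!("file {file_id} at offset {offset}");
    log(pos); // the whole value is still available as `pos`
}

fn main() {
    handle(FilePosition { file_id: 0, offset: 42 });
}
```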


@@ -29,45 +29,39 @@ use syntax::{ast, AstNode, AstToken, SyntaxKind::*, SyntaxToken, TextRange, T};
// image::https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif[]
pub(crate) fn goto_definition(
    db: &RootDatabase,
-    position: FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
    let sema = &Semantics::new(db);
-    let file = sema.parse(position.file_id).syntax().clone();
-    let original_token =
-        pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
-            IDENT
-            | INT_NUMBER
-            | LIFETIME_IDENT
-            | T![self]
-            | T![super]
-            | T![crate]
-            | T![Self]
-            | COMMENT => 4,
-            // index and prefix ops
-            T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
-            kind if kind.is_keyword() => 2,
-            T!['('] | T![')'] => 2,
-            kind if kind.is_trivia() => 0,
-            _ => 1,
-        })?;
+    let file = sema.parse(file_id).syntax().clone();
+    let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
+        IDENT
+        | INT_NUMBER
+        | LIFETIME_IDENT
+        | T![self]
+        | T![super]
+        | T![crate]
+        | T![Self]
+        | COMMENT => 4,
+        // index and prefix ops
+        T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
+        kind if kind.is_keyword() => 2,
+        T!['('] | T![')'] => 2,
+        kind if kind.is_trivia() => 0,
+        _ => 1,
+    })?;
    if let Some(doc_comment) = token_as_doc_comment(&original_token) {
-        return doc_comment.get_definition_with_descend_at(
-            sema,
-            position.offset,
-            |def, _, link_range| {
-                let nav = def.try_to_nav(db)?;
-                Some(RangeInfo::new(link_range, vec![nav]))
-            },
-        );
+        return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, link_range| {
+            let nav = def.try_to_nav(db)?;
+            Some(RangeInfo::new(link_range, vec![nav]))
+        });
    }
    let navs = sema
-        .descend_into_macros(original_token.clone())
+        .descend_into_macros(original_token.clone(), offset)
        .into_iter()
        .filter_map(|token| {
            let parent = token.parent()?;
            if let Some(tt) = ast::TokenTree::cast(parent) {
-                if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), position.file_id)
-                {
+                if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) {
                    return Some(vec![x]);
                }
            }
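`pick_best_token` chooses among the tokens touching the offset (at most two, one on each side of the cursor) by the priority the closure assigns to their `SyntaxKind`, which is why identifiers and comments rank above punctuation and trivia here. A rough standalone sketch of that selection logic, with stand-in types rather than the actual `ide_db` helper:

```rust
// Simplified stand-in types: a token is characterised only by its kind.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Kind { Ident, Paren, Whitespace }

#[derive(Clone, Copy, Debug, PartialEq)]
struct Token { kind: Kind }

// Sketch of the helper's behaviour: keep the candidate whose kind gets the
// highest score from the ranking closure.
fn pick_best_token(
    candidates: impl Iterator<Item = Token>,
    f: impl Fn(Kind) -> usize,
) -> Option<Token> {
    candidates.max_by_key(|t| f(t.kind))
}

fn main() {
    // At a cursor position like `foo($0)`, both the identifier and the paren
    // touch the cursor; the ranking closure makes the identifier win.
    let at_cursor = [Token { kind: Kind::Paren }, Token { kind: Kind::Ident }];
    let best = pick_best_token(at_cursor.into_iter(), |kind| match kind {
        Kind::Ident => 4,
        Kind::Paren => 2,
        Kind::Whitespace => 0,
    });
    assert_eq!(best, Some(Token { kind: Kind::Ident }));
}
```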


@@ -22,20 +22,19 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
// image::https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif[]
pub(crate) fn goto_implementation(
    db: &RootDatabase,
-    position: FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
    let sema = Semantics::new(db);
-    let source_file = sema.parse(position.file_id);
+    let source_file = sema.parse(file_id);
    let syntax = source_file.syntax().clone();
-    let original_token =
-        pick_best_token(syntax.token_at_offset(position.offset), |kind| match kind {
-            IDENT | T![self] | INT_NUMBER => 1,
-            _ => 0,
-        })?;
+    let original_token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
+        IDENT | T![self] | INT_NUMBER => 1,
+        _ => 0,
+    })?;
    let range = original_token.text_range();
    let navs =
-        sema.descend_into_macros(original_token)
+        sema.descend_into_macros(original_token, offset)
            .into_iter()
            .filter_map(|token| token.parent().and_then(ast::NameLike::cast))
            .filter_map(|node| match &node {


@@ -16,13 +16,13 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
// image::https://user-images.githubusercontent.com/48062697/113020657-b560f500-917a-11eb-9007-0f809733a338.gif[]
pub(crate) fn goto_type_definition(
    db: &RootDatabase,
-    position: FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
    let sema = hir::Semantics::new(db);
-    let file: ast::SourceFile = sema.parse(position.file_id);
+    let file: ast::SourceFile = sema.parse(file_id);
    let token: SyntaxToken =
-        pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
+        pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind {
            IDENT | INT_NUMBER | T![self] => 2,
            kind if kind.is_trivia() => 0,
            _ => 1,
@@ -37,7 +37,7 @@ pub(crate) fn goto_type_definition(
        }
    };
    let range = token.text_range();
-    sema.descend_into_macros(token)
+    sema.descend_into_macros(token, offset)
        .into_iter()
        .filter_map(|token| {
            let ty = sema


@@ -15,6 +15,7 @@ use syntax::{
    SyntaxKind::{self, IDENT, INT_NUMBER},
    SyntaxNode, SyntaxToken, TextRange, T,
};
+use text_edit::TextSize;

use crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav};
@@ -51,7 +52,7 @@ pub struct HighlightRelatedConfig {
pub(crate) fn highlight_related(
    sema: &Semantics<'_, RootDatabase>,
    config: HighlightRelatedConfig,
-    FilePosition { offset, file_id }: FilePosition,
+    pos @ FilePosition { offset, file_id }: FilePosition,
) -> Option<Vec<HighlightedRange>> {
    let _p = profile::span("highlight_related");
    let syntax = sema.parse(file_id).syntax().clone();
@@ -79,7 +80,7 @@ pub(crate) fn highlight_related(
        }
        T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
        T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
-        _ if config.references => highlight_references(sema, &syntax, token, file_id),
+        _ if config.references => highlight_references(sema, &syntax, token, pos),
        _ => None,
    }
}
@@ -129,9 +130,9 @@ fn highlight_references(
    sema: &Semantics<'_, RootDatabase>,
    node: &SyntaxNode,
    token: SyntaxToken,
-    file_id: FileId,
+    FilePosition { file_id, offset }: FilePosition,
) -> Option<Vec<HighlightedRange>> {
-    let defs = find_defs(sema, token.clone());
+    let defs = find_defs(sema, token.clone(), offset);
    let usages = defs
        .iter()
        .filter_map(|&d| {
@@ -455,8 +456,12 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange
    }
}

-fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
-    sema.descend_into_macros(token)
+fn find_defs(
+    sema: &Semantics<'_, RootDatabase>,
+    token: SyntaxToken,
+    offset: TextSize,
+) -> FxHashSet<Definition> {
+    sema.descend_into_macros(token, offset)
        .into_iter()
        .filter_map(|token| IdentClass::classify_token(sema, &token))
        .map(IdentClass::definitions_no_ops)


@@ -162,9 +162,9 @@ fn hover_simple(
    // prefer descending the same token kind in attribute expansions, in normal macros text
    // equivalency is more important
    let descended = if in_attr {
-        [sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
+        [sema.descend_into_macros_with_kind_preference(original_token.clone(), offset)].into()
    } else {
-        sema.descend_into_macros_with_same_text(original_token.clone())
+        sema.descend_into_macros_with_same_text(original_token.clone(), offset)
    };
    let descended = || descended.iter();


@@ -1556,6 +1556,49 @@ fn test_hover_function_show_types() {
    );
}
#[test]
fn test_hover_function_associated_type_params() {
check(
r#"
trait Foo { type Bar; }
impl Foo for i32 { type Bar = i64; }
fn foo(arg: <i32 as Foo>::Bar) {}
fn main() { foo$0; }
"#,
expect![[r#"
*foo*
```rust
test
```
```rust
fn foo(arg: <i32 as Foo>::Bar)
```
"#]],
);
check(
r#"
trait Foo<T> { type Bar<U>; }
impl Foo<i64> for i32 { type Bar<U> = i32; }
fn foo(arg: <<i32 as Foo<i64>>::Bar<i8> as Foo<i64>>::Bar<i8>) {}
fn main() { foo$0; }
"#,
expect![[r#"
*foo*
```rust
test
```
```rust
fn foo(arg: <<i32 as Foo<i64>>::Bar<i8> as Foo<i64>>::Bar<i8>)
```
"#]],
);
}
#[test]
fn test_hover_function_pointer_show_identifiers() {
    check(
@@ -3292,7 +3335,50 @@ struct S$0T<const C: usize = 1, T = Foo>(T);
    ```
    ```rust
-    struct ST<const C: usize, T = Foo>
+    struct ST<const C: usize = 1, T = Foo>
```
"#]],
);
}
#[test]
fn const_generic_default_value() {
check(
r#"
struct Foo;
struct S$0T<const C: usize = {40 + 2}, T = Foo>(T);
"#,
expect![[r#"
*ST*
```rust
test
```
```rust
struct ST<const C: usize = {const}, T = Foo>
```
"#]],
);
}
#[test]
fn const_generic_default_value_2() {
check(
r#"
struct Foo;
const VAL = 1;
struct S$0T<const C: usize = VAL, T = Foo>(T);
"#,
expect![[r#"
*ST*
```rust
test
```
```rust
struct ST<const C: usize = VAL, T = Foo>
    ```
    "#]],
    );
@@ -6469,3 +6555,22 @@ fn test() {
        "#]],
    );
}
#[test]
fn generic_params_disabled_by_cfg() {
check(
r#"
struct S<#[cfg(never)] T>;
fn test() {
let s$0: S = S;
}
"#,
expect![[r#"
*s*
```rust
let s: S // size = 0, align = 1
```
"#]],
);
}


@@ -484,7 +484,7 @@ impl Analysis {
        sysroot: Option<&OsStr>,
    ) -> Cancellable<doc_links::DocumentationLinks> {
        self.with_db(|db| {
-            doc_links::external_docs(db, &position, target_dir, sysroot).unwrap_or_default()
+            doc_links::external_docs(db, position, target_dir, sysroot).unwrap_or_default()
        })
    }


@@ -99,7 +99,7 @@ pub(crate) fn moniker(
        });
    }
    let navs = sema
-        .descend_into_macros(original_token.clone())
+        .descend_into_macros(original_token.clone(), offset)
        .into_iter()
        .filter_map(|token| {
            IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {

Some files were not shown because too many files have changed in this diff.