⬆️ rust-analyzer

Laurențiu Nicola 2023-03-20 08:31:01 +02:00
parent 544b4cfe4d
commit dbf04a5ee2
106 changed files with 2219 additions and 609 deletions

Cargo.lock (generated)

@ -169,9 +169,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chalk-derive"
version = "0.88.0"
version = "0.89.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4df80a3fbc1f0e59f560eeeebca94bf655566a8ad3023c210a109deb6056455a"
checksum = "ea176c50987dc4765961aa165001e8eb5a722a26308c5797a47303ea91686aab"
dependencies = [
"proc-macro2",
"quote",
@ -181,9 +181,9 @@ dependencies = [
[[package]]
name = "chalk-ir"
version = "0.88.0"
version = "0.89.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f39e5272016916956298cceea5147006f897972c274a768ed4d6e074efe5d3fb"
checksum = "473b480241695428c14e8f84f1c9a47ef232450a50faf3a4041e5c9dc11e0a3b"
dependencies = [
"bitflags",
"chalk-derive",
@ -192,9 +192,9 @@ dependencies = [
[[package]]
name = "chalk-recursive"
version = "0.88.0"
version = "0.89.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9d60b42ad7478d3e027e2f9ea4e99fbbb8fdee0c8c3cf068be269f57e603618"
checksum = "6764b4fe67cac3a3758185084efbfbd39bf0352795824ba849ddd2b64cd4bb28"
dependencies = [
"chalk-derive",
"chalk-ir",
@ -205,9 +205,9 @@ dependencies = [
[[package]]
name = "chalk-solve"
version = "0.88.0"
version = "0.89.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab30620ea5b36819525eaab2204f4b8e1842fc7ee36826424a28bef59ae7fecf"
checksum = "55a7e6160966eceb6e7dcc2f479a2af4c477aaf5bccbc640d82515995ab1a6cc"
dependencies = [
"chalk-derive",
"chalk-ir",


@ -341,8 +341,8 @@ macro_rules! __ra_macro_fixture339 {($name : ident )=>{ impl Clone for $name
macro_rules! __ra_macro_fixture340 {([$($stack : tt )*])=>{$($stack )* }; ([$($stack : tt )*]{$($tail : tt )* })=>{$($stack )* { remove_sections_inner ! ([]$($tail )*); }}; ([$($stack : tt )*]$t : tt $($tail : tt )*)=>{ remove_sections ! ([$($stack )* $t ]$($tail )*); }; }
macro_rules! __ra_macro_fixture341 {($t : ty ,$z : expr )=>{ impl Zero for $t { fn zero ()-> Self {$z as $t } fn is_zero (& self )-> bool { self == & Self :: zero ()}}}; }
macro_rules! __ra_macro_fixture342 {($($ident : ident ),* $(,)?)=>{$(# [ allow ( bad_style )] pub const $ident : super :: Name = super :: Name :: new_inline ( stringify ! ($ident )); )* }; }
macro_rules! __ra_macro_fixture343 {($($trait : ident =>$expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinDeriveExpander {$($trait ),* } impl BuiltinDeriveExpander { pub fn expand (& self , db : & dyn AstDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> Result < tt :: Subtree , mbe :: ExpandError > { let expander = match * self {$(BuiltinDeriveExpander ::$trait =>$expand , )* }; expander ( db , id , tt )} fn find_by_name ( name : & name :: Name )-> Option < Self > { match name {$(id if id == & name :: name ! [$trait ]=> Some ( BuiltinDeriveExpander ::$trait ), )* _ => None , }}}}; }
macro_rules! __ra_macro_fixture344 {( LAZY : $(($name : ident , $kind : ident )=>$expand : ident ),* , EAGER : $(($e_name : ident , $e_kind : ident )=>$e_expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinFnLikeExpander {$($kind ),* }# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum EagerExpander {$($e_kind ),* } impl BuiltinFnLikeExpander { pub fn expand (& self , db : & dyn AstDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> ExpandResult < tt :: Subtree > { let expander = match * self {$(BuiltinFnLikeExpander ::$kind =>$expand , )* }; expander ( db , id , tt )}} impl EagerExpander { pub fn expand (& self , db : & dyn AstDatabase , arg_id : EagerMacroId , tt : & tt :: Subtree , )-> ExpandResult < Option < ( tt :: Subtree , FragmentKind )>> { let expander = match * self {$(EagerExpander ::$e_kind =>$e_expand , )* }; expander ( db , arg_id , tt )}} fn find_by_name ( ident : & name :: Name )-> Option < Either < BuiltinFnLikeExpander , EagerExpander >> { match ident {$(id if id == & name :: name ! [$name ]=> Some ( Either :: Left ( BuiltinFnLikeExpander ::$kind )), )* $(id if id == & name :: name ! [$e_name ]=> Some ( Either :: Right ( EagerExpander ::$e_kind )), )* _ => return None , }}}; }
macro_rules! __ra_macro_fixture343 {($($trait : ident =>$expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinDeriveExpander {$($trait ),* } impl BuiltinDeriveExpander { pub fn expand (& self , db : & dyn ExpandDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> Result < tt :: Subtree , mbe :: ExpandError > { let expander = match * self {$(BuiltinDeriveExpander ::$trait =>$expand , )* }; expander ( db , id , tt )} fn find_by_name ( name : & name :: Name )-> Option < Self > { match name {$(id if id == & name :: name ! [$trait ]=> Some ( BuiltinDeriveExpander ::$trait ), )* _ => None , }}}}; }
macro_rules! __ra_macro_fixture344 {( LAZY : $(($name : ident , $kind : ident )=>$expand : ident ),* , EAGER : $(($e_name : ident , $e_kind : ident )=>$e_expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinFnLikeExpander {$($kind ),* }# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum EagerExpander {$($e_kind ),* } impl BuiltinFnLikeExpander { pub fn expand (& self , db : & dyn ExpandDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> ExpandResult < tt :: Subtree > { let expander = match * self {$(BuiltinFnLikeExpander ::$kind =>$expand , )* }; expander ( db , id , tt )}} impl EagerExpander { pub fn expand (& self , db : & dyn ExpandDatabase , arg_id : EagerMacroId , tt : & tt :: Subtree , )-> ExpandResult < Option < ( tt :: Subtree , FragmentKind )>> { let expander = match * self {$(EagerExpander ::$e_kind =>$e_expand , )* }; expander ( db , arg_id , tt )}} fn find_by_name ( ident : & name :: Name )-> Option < Either < BuiltinFnLikeExpander , EagerExpander >> { match ident {$(id if id == & name :: name ! [$name ]=> Some ( Either :: Left ( BuiltinFnLikeExpander ::$kind )), )* $(id if id == & name :: name ! [$e_name ]=> Some ( Either :: Right ( EagerExpander ::$e_kind )), )* _ => return None , }}}; }
macro_rules! __ra_macro_fixture345 {($($ty : ty =>$this : ident $im : block );*)=>{$(impl ToTokenTree for $ty { fn to_token ($this )-> tt :: TokenTree { let leaf : tt :: Leaf = $im . into (); leaf . into ()}} impl ToTokenTree for &$ty { fn to_token ($this )-> tt :: TokenTree { let leaf : tt :: Leaf = $im . clone (). into (); leaf . into ()}})* }}
macro_rules! __ra_macro_fixture346 {($name : ident )=>{ impl $crate :: salsa :: InternKey for $name { fn from_intern_id ( v : $crate :: salsa :: InternId )-> Self {$name ( v )} fn as_intern_id (& self )-> $crate :: salsa :: InternId { self . 0 }}}; }
macro_rules! __ra_macro_fixture347 {($($var : ident ($t : ty )),+ )=>{$(impl From <$t > for AttrOwner { fn from ( t : $t )-> AttrOwner { AttrOwner ::$var ( t )}})+ }; }


@ -40,6 +40,7 @@ pub struct StructData {
pub repr: Option<ReprOptions>,
pub visibility: RawVisibility,
pub rustc_has_incoherent_inherent_impls: bool,
pub fundamental: bool,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@ -173,10 +174,10 @@ impl StructData {
let item_tree = loc.id.item_tree(db);
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
let rustc_has_incoherent_inherent_impls = item_tree
.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into())
.by_key("rustc_has_incoherent_inherent_impls")
.exists();
let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
let rustc_has_incoherent_inherent_impls =
attrs.by_key("rustc_has_incoherent_inherent_impls").exists();
let fundamental = attrs.by_key("fundamental").exists();
let strukt = &item_tree[loc.id.value];
let (variant_data, diagnostics) = lower_fields(
@ -196,6 +197,7 @@ impl StructData {
repr,
visibility: item_tree[strukt.visibility].clone(),
rustc_has_incoherent_inherent_impls,
fundamental,
}),
diagnostics.into(),
)
@ -215,10 +217,10 @@ impl StructData {
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
let rustc_has_incoherent_inherent_impls = item_tree
.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into())
.by_key("rustc_has_incoherent_inherent_impls")
.exists();
let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
let rustc_has_incoherent_inherent_impls =
attrs.by_key("rustc_has_incoherent_inherent_impls").exists();
let fundamental = attrs.by_key("fundamental").exists();
let union = &item_tree[loc.id.value];
let (variant_data, diagnostics) = lower_fields(
@ -238,6 +240,7 @@ impl StructData {
repr,
visibility: item_tree[union.visibility].clone(),
rustc_has_incoherent_inherent_impls,
fundamental,
}),
diagnostics.into(),
)
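
For context: the new `fundamental` flag records whether the ADT carries the perma-unstable `#[fundamental]` attribute, which makes a type transparent to coherence. A minimal nightly-only sketch (the `Wrap` and `Local` names are made up for illustration):

#![feature(fundamental)]

// `attrs.by_key("fundamental").exists()` above would return true for this type.
#[fundamental]
pub struct Wrap<T>(pub T);

pub trait Local {}

// Coherence treats a fundamental type like its type parameter: a downstream
// crate may implement its own traits for `Wrap<TheirType>` even though `Wrap`
// is foreign to it, which is why trait solving needs to track this flag.
impl<T: Local> Local for Wrap<T> {}

fn main() {}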


@ -24,7 +24,9 @@ use syntax::{ast, AstPtr, SyntaxNode, SyntaxNodePtr};
use crate::{
attr::Attrs,
db::DefDatabase,
expr::{dummy_expr_id, Binding, BindingId, Expr, ExprId, Label, LabelId, Pat, PatId},
expr::{
dummy_expr_id, Binding, BindingId, Expr, ExprId, Label, LabelId, Pat, PatId, RecordFieldPat,
},
item_scope::BuiltinShadowMode,
macro_id_to_def_id,
nameres::DefMap,
@ -432,6 +434,44 @@ impl Body {
pats.shrink_to_fit();
bindings.shrink_to_fit();
}
pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) {
self.walk_pats(pat_id, &mut |pat| {
if let Pat::Bind { id, .. } = pat {
f(*id);
}
});
}
pub fn walk_pats(&self, pat_id: PatId, f: &mut impl FnMut(&Pat)) {
let pat = &self[pat_id];
f(pat);
match pat {
Pat::Range { .. }
| Pat::Lit(..)
| Pat::Path(..)
| Pat::ConstBlock(..)
| Pat::Wild
| Pat::Missing => {}
&Pat::Bind { subpat, .. } => {
if let Some(subpat) = subpat {
self.walk_pats(subpat, f);
}
}
Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => {
args.iter().copied().for_each(|p| self.walk_pats(p, f));
}
Pat::Ref { pat, .. } => self.walk_pats(*pat, f),
Pat::Slice { prefix, slice, suffix } => {
let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter());
total_iter.copied().for_each(|p| self.walk_pats(p, f));
}
Pat::Record { args, .. } => {
args.iter().for_each(|RecordFieldPat { pat, .. }| self.walk_pats(*pat, f));
}
Pat::Box { inner } => self.walk_pats(*inner, f),
}
}
}
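
The two helpers above implement a pre-order walk: `walk_pats` visits a pattern and recurses into all of its sub-patterns, and `walk_bindings_in_pat` filters that walk down to `Pat::Bind` nodes. A self-contained sketch of the same traversal over a simplified pattern tree (`Pat` here is a stand-in, not rust-analyzer's arena-backed type):

// Simplified stand-in for the arena-backed `Pat` in the diff above.
enum Pat {
    Wild,
    Bind { id: u32, subpat: Option<Box<Pat>> },
    Tuple(Vec<Pat>),
}

// Pre-order walk: visit the pattern itself, then recurse into sub-patterns.
fn walk_pats(pat: &Pat, f: &mut impl FnMut(&Pat)) {
    f(pat);
    match pat {
        Pat::Wild => {}
        Pat::Bind { subpat, .. } => {
            if let Some(sub) = subpat {
                walk_pats(sub, f);
            }
        }
        Pat::Tuple(args) => args.iter().for_each(|p| walk_pats(p, f)),
    }
}

// Filter the walk down to binding nodes, mirroring `walk_bindings_in_pat`.
fn walk_bindings(pat: &Pat, mut f: impl FnMut(u32)) {
    walk_pats(pat, &mut |p| {
        if let Pat::Bind { id, .. } = p {
            f(*id);
        }
    });
}

fn main() {
    // The pattern `(a, (_, b))` with binding ids 0 and 1.
    let pat = Pat::Tuple(vec![
        Pat::Bind { id: 0, subpat: None },
        Pat::Tuple(vec![Pat::Wild, Pat::Bind { id: 1, subpat: None }]),
    ]);
    walk_bindings(&pat, |id| println!("binding {id}")); // prints 0, then 1
}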
impl Default for Body {


@ -499,6 +499,8 @@ impl ExprCollector<'_> {
Movability::Movable
};
ClosureKind::Generator(movability)
} else if e.async_token().is_some() {
ClosureKind::Async
} else {
ClosureKind::Closure
};
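
The new branch classifies `async || ...` closures by checking for the `async` token, alongside the existing generator and plain-closure cases. A short nightly-only sketch of the surface syntax each arm corresponds to (async closures were still feature-gated when this landed):

#![feature(async_closure)]

fn main() {
    let _plain = || 1;       // ClosureKind::Closure
    let _async = async || 1; // e.async_token().is_some() => ClosureKind::Async
    // A `static || { yield; }` generator would map to
    // ClosureKind::Generator(Movability::Static).
}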


@ -360,8 +360,14 @@ impl<'a> Printer<'a> {
w!(self, "]");
}
Expr::Closure { args, arg_types, ret_type, body, closure_kind } => {
if let ClosureKind::Generator(Movability::Static) = closure_kind {
w!(self, "static ");
match closure_kind {
ClosureKind::Generator(Movability::Static) => {
w!(self, "static ");
}
ClosureKind::Async => {
w!(self, "async ");
}
_ => (),
}
w!(self, "|");
for (i, (pat, ty)) in args.iter().zip(arg_types.iter()).enumerate() {


@ -35,6 +35,7 @@ pub struct FunctionData {
pub visibility: RawVisibility,
pub abi: Option<Interned<str>>,
pub legacy_const_generics_indices: Box<[u32]>,
pub rustc_allow_incoherent_impl: bool,
flags: FnFlags,
}
@ -84,13 +85,14 @@ impl FunctionData {
}
}
let legacy_const_generics_indices = item_tree
.attrs(db, krate, ModItem::from(loc.id.value).into())
let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
let legacy_const_generics_indices = attrs
.by_key("rustc_legacy_const_generics")
.tt_values()
.next()
.map(parse_rustc_legacy_const_generics)
.unwrap_or_default();
let rustc_allow_incoherent_impl = attrs.by_key("rustc_allow_incoherent_impl").exists();
Arc::new(FunctionData {
name: func.name.clone(),
@ -108,6 +110,7 @@ impl FunctionData {
abi: func.abi.clone(),
legacy_const_generics_indices,
flags,
rustc_allow_incoherent_impl,
})
}
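
`rustc_allow_incoherent_impl` is the item-level half of an attribute pair: the type opts in with `#[rustc_has_incoherent_inherent_impls]` (already tracked above for ADTs and traits), and each item of an out-of-crate inherent impl opts in with `#[rustc_allow_incoherent_impl]`; the standard library uses this so that `alloc` can add inherent methods to types `core` defines. A nightly-only sketch with a made-up `Widget` type (requires `#![feature(rustc_attrs)]`):

#![feature(rustc_attrs)]

#[rustc_has_incoherent_inherent_impls] // the flag StructData/TraitData record
pub struct Widget;

impl Widget {
    #[rustc_allow_incoherent_impl] // the flag FunctionData now records
    pub fn poke(&self) {}
}

fn main() {}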
@ -171,6 +174,7 @@ pub struct TypeAliasData {
pub visibility: RawVisibility,
pub is_extern: bool,
pub rustc_has_incoherent_inherent_impls: bool,
pub rustc_allow_incoherent_impl: bool,
/// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl).
pub bounds: Vec<Interned<TypeBound>>,
}
@ -189,10 +193,14 @@ impl TypeAliasData {
item_tree[typ.visibility].clone()
};
let rustc_has_incoherent_inherent_impls = item_tree
.attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into())
.by_key("rustc_has_incoherent_inherent_impls")
.exists();
let attrs = item_tree.attrs(
db,
loc.container.module(db).krate(),
ModItem::from(loc.id.value).into(),
);
let rustc_has_incoherent_inherent_impls =
attrs.by_key("rustc_has_incoherent_inherent_impls").exists();
let rustc_allow_incoherent_impl = attrs.by_key("rustc_allow_incoherent_impl").exists();
Arc::new(TypeAliasData {
name: typ.name.clone(),
@ -200,6 +208,7 @@ impl TypeAliasData {
visibility,
is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
rustc_has_incoherent_inherent_impls,
rustc_allow_incoherent_impl,
bounds: typ.bounds.to_vec(),
})
}
@ -212,11 +221,12 @@ pub struct TraitData {
pub is_auto: bool,
pub is_unsafe: bool,
pub rustc_has_incoherent_inherent_impls: bool,
pub skip_array_during_method_dispatch: bool,
pub fundamental: bool,
pub visibility: RawVisibility,
/// Whether the trait has `#[rustc_skip_array_during_method_dispatch]`. `hir_ty` will ignore
/// method calls to this trait's methods when the receiver is an array and the crate edition is
/// 2015 or 2018.
pub skip_array_during_method_dispatch: bool,
// box it as the vec is usually empty anyways
pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
}
@ -245,6 +255,7 @@ impl TraitData {
attrs.by_key("rustc_skip_array_during_method_dispatch").exists();
let rustc_has_incoherent_inherent_impls =
attrs.by_key("rustc_has_incoherent_inherent_impls").exists();
let fundamental = attrs.by_key("fundamental").exists();
let mut collector =
AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr));
collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items);
@ -260,6 +271,7 @@ impl TraitData {
visibility,
skip_array_during_method_dispatch,
rustc_has_incoherent_inherent_impls,
fundamental,
}),
diagnostics.into(),
)
@ -450,6 +462,7 @@ pub struct ConstData {
pub name: Option<Name>,
pub type_ref: Interned<TypeRef>,
pub visibility: RawVisibility,
pub rustc_allow_incoherent_impl: bool,
}
impl ConstData {
@ -463,10 +476,16 @@ impl ConstData {
item_tree[konst.visibility].clone()
};
let rustc_allow_incoherent_impl = item_tree
.attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into())
.by_key("rustc_allow_incoherent_impl")
.exists();
Arc::new(ConstData {
name: konst.name.clone(),
type_ref: konst.type_ref.clone(),
visibility,
rustc_allow_incoherent_impl,
})
}
}


@ -3,7 +3,7 @@ use std::sync::Arc;
use base_db::{salsa, CrateId, SourceDatabase, Upcast};
use either::Either;
use hir_expand::{db::AstDatabase, HirFileId};
use hir_expand::{db::ExpandDatabase, HirFileId};
use intern::Interned;
use la_arena::ArenaMap;
use syntax::{ast, AstPtr};
@ -64,7 +64,7 @@ pub trait InternDatabase: SourceDatabase {
}
#[salsa::query_group(DefDatabaseStorage)]
pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDatabase> {
#[salsa::input]
fn enable_proc_attr_macros(&self) -> bool;


@ -245,6 +245,7 @@ pub enum Expr {
pub enum ClosureKind {
Closure,
Generator(Movability),
Async,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]


@ -20,7 +20,7 @@ use ::mbe::TokenMap;
use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
use expect_test::Expect;
use hir_expand::{
db::{AstDatabase, TokenExpander},
db::{ExpandDatabase, TokenExpander},
AstId, InFile, MacroDefId, MacroDefKind, MacroFile,
};
use stdx::format_to;


@ -120,6 +120,8 @@ pub struct DefMap {
registered_tools: Vec<SmolStr>,
/// Unstable features of Rust enabled with `#![feature(A, B)]`.
unstable_features: FxHashSet<SmolStr>,
/// #[rustc_coherence_is_core]
rustc_coherence_is_core: bool,
edition: Edition,
recursion_limit: Option<u32>,
@ -215,7 +217,7 @@ pub struct ModuleData {
pub origin: ModuleOrigin,
/// Declared visibility of this module.
pub visibility: Visibility,
/// Always [`None`] for block modules
pub parent: Option<LocalModuleId>,
pub children: FxHashMap<Name, LocalModuleId>,
pub scope: ItemScope,
@ -292,6 +294,7 @@ impl DefMap {
registered_tools: Vec::new(),
unstable_features: FxHashSet::default(),
diagnostics: Vec::new(),
rustc_coherence_is_core: false,
}
}
@ -325,6 +328,10 @@ impl DefMap {
self.unstable_features.contains(feature)
}
pub fn is_rustc_coherence_is_core(&self) -> bool {
self.rustc_coherence_is_core
}
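
`rustc_coherence_is_core` is a crate-level marker that, in rustc, only `core` itself carries; it tells coherence to accept inherent impls of builtin types. A hypothetical nightly-only sketch of the shape (in practice a crate other than the real `core` has no business setting this):

#![feature(rustc_attrs)]
#![rustc_coherence_is_core]

// Normally rejected (inherent impls for primitives are reserved to `core`);
// with the marker set, this is how `core` defines e.g. `impl i32 { .. }`.
impl i32 {
    pub fn is_answer(self) -> bool {
        self == 42
    }
}

fn main() {}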
pub fn root(&self) -> LocalModuleId {
self.root
}
@ -337,7 +344,7 @@ impl DefMap {
self.proc_macro_loading_error.as_deref()
}
pub(crate) fn krate(&self) -> CrateId {
pub fn krate(&self) -> CrateId {
self.krate
}
@ -425,7 +432,7 @@ impl DefMap {
Some(self.block?.parent)
}
/// Returns the module containing `local_mod`, either the parent `mod`, or the module containing
/// Returns the module containing `local_mod`, either the parent `mod`, or the module (or block) containing
/// the block, if `self` corresponds to a block expression.
pub fn containing_module(&self, local_mod: LocalModuleId) -> Option<ModuleId> {
match self[local_mod].parent {
@ -498,6 +505,7 @@ impl DefMap {
krate: _,
prelude: _,
root: _,
rustc_coherence_is_core: _,
} = self;
extern_prelude.shrink_to_fit();


@ -87,10 +87,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
// FIXME: a hacky way to create a Name from string.
let name =
tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() };
(
name.as_name(),
ProcMacroExpander::new(def_map.krate, base_db::ProcMacroId(idx as u32)),
)
(name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32)))
})
.collect()
}
@ -299,6 +296,11 @@ impl DefCollector<'_> {
continue;
}
if attr_name.as_text().as_deref() == Some("rustc_coherence_is_core") {
self.def_map.rustc_coherence_is_core = true;
continue;
}
if *attr_name == hir_expand::name![feature] {
let features =
attr.parse_path_comma_token_tree().into_iter().flatten().filter_map(
@ -581,7 +583,7 @@ impl DefCollector<'_> {
let kind = def.kind.to_basedb_kind();
let (expander, kind) = match self.proc_macros.iter().find(|(n, _)| n == &def.name) {
Some(&(_, expander)) => (expander, kind),
None => (ProcMacroExpander::dummy(self.def_map.krate), kind),
None => (ProcMacroExpander::dummy(), kind),
};
let proc_macro_id =


@ -9,7 +9,7 @@ use base_db::{
salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition,
SourceDatabase, Upcast,
};
use hir_expand::{db::AstDatabase, InFile};
use hir_expand::{db::ExpandDatabase, InFile};
use stdx::hash::NoHashHashSet;
use syntax::{algo, ast, AstNode};
@ -23,7 +23,7 @@ use crate::{
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceDatabaseStorage,
hir_expand::db::AstDatabaseStorage,
hir_expand::db::ExpandDatabaseStorage,
crate::db::InternDatabaseStorage,
crate::db::DefDatabaseStorage
)]
@ -40,8 +40,8 @@ impl Default for TestDB {
}
}
impl Upcast<dyn AstDatabase> for TestDB {
fn upcast(&self) -> &(dyn AstDatabase + 'static) {
impl Upcast<dyn ExpandDatabase> for TestDB {
fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
&*self
}
}


@ -131,21 +131,23 @@ impl Visibility {
// visibility as the containing module (even though no items are directly nameable from
// there, getting this right is important for method resolution).
// In that case, we adjust the visibility of `to_module` to point to the containing module.
// Additional complication: `to_module` might be in `from_module`'s `DefMap`, which we're
// currently computing, so we must not call the `def_map` query for it.
let arc;
let to_module_def_map =
if to_module.krate == def_map.krate() && to_module.block == def_map.block_id() {
cov_mark::hit!(is_visible_from_same_block_def_map);
def_map
} else {
arc = to_module.def_map(db);
&arc
};
let is_block_root =
to_module.block.is_some() && to_module_def_map[to_module.local_id].parent.is_none();
if is_block_root {
to_module = to_module_def_map.containing_module(to_module.local_id).unwrap();
let mut arc;
loop {
let to_module_def_map =
if to_module.krate == def_map.krate() && to_module.block == def_map.block_id() {
cov_mark::hit!(is_visible_from_same_block_def_map);
def_map
} else {
arc = to_module.def_map(db);
&arc
};
match to_module_def_map.parent() {
Some(parent) => to_module = parent,
None => break,
}
}
// from_module needs to be a descendant of to_module
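
The loop replaces a single-step adjustment: `to_module` may sit arbitrarily deep in nested block expressions, so its visibility has to be widened through every enclosing block `DefMap`, not just one. A sketch of the kind of code that needs the repeated ascent (names are illustrative):

fn host() {
    {
        {
            struct S;
            impl S {
                fn method(&self) {} // defined two block modules deep
            }
        }
    }
    // For method resolution, the impl's visibility must be widened up to the
    // containing non-block module, which now takes two iterations of the loop.
}

fn main() {
    host();
}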


@ -10,7 +10,7 @@ use smallvec::{smallvec, SmallVec};
use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
use crate::{
db::AstDatabase,
db::ExpandDatabase,
hygiene::Hygiene,
mod_path::{ModPath, PathKind},
name::AsName,
@ -38,7 +38,7 @@ impl ops::Deref for RawAttrs {
impl RawAttrs {
pub const EMPTY: Self = Self { entries: None };
pub fn new(db: &dyn AstDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
let entries = collect_attrs(owner)
.filter_map(|(id, attr)| match attr {
Either::Left(attr) => {
@ -55,7 +55,7 @@ impl RawAttrs {
Self { entries: if entries.is_empty() { None } else { Some(entries) } }
}
pub fn from_attrs_owner(db: &dyn AstDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
let hygiene = Hygiene::new(db, owner.file_id);
Self::new(db, owner.value, &hygiene)
}
@ -87,7 +87,7 @@ impl RawAttrs {
/// Processes `cfg_attr`s, returning the resulting semantic `Attrs`.
// FIXME: This should return a different type
pub fn filter(self, db: &dyn AstDatabase, krate: CrateId) -> RawAttrs {
pub fn filter(self, db: &dyn ExpandDatabase, krate: CrateId) -> RawAttrs {
let has_cfg_attrs = self
.iter()
.any(|attr| attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]));
@ -199,7 +199,7 @@ impl fmt::Display for AttrInput {
impl Attr {
fn from_src(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
ast: ast::Meta,
hygiene: &Hygiene,
id: AttrId,
@ -221,7 +221,7 @@ impl Attr {
}
fn from_tt(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
tt: &tt::Subtree,
hygiene: &Hygiene,
id: AttrId,


@ -1,6 +1,6 @@
//! Builtin attributes.
use crate::{db::AstDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin {
( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
@ -12,7 +12,7 @@ macro_rules! register_builtin {
impl BuiltinAttrExpander {
pub fn expand(
&self,
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -60,7 +60,7 @@ pub fn find_builtin_attr(ident: &name::Name) -> Option<BuiltinAttrExpander> {
}
fn dummy_attr_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -90,7 +90,7 @@ fn dummy_attr_expand(
/// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
/// [`hir::Semantics`] to make this work.
fn derive_attr_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {


@ -9,7 +9,7 @@ use syntax::{
match_ast,
};
use crate::{db::AstDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
macro_rules! register_builtin {
( $($trait:ident => $expand:ident),* ) => {
@ -21,7 +21,7 @@ macro_rules! register_builtin {
impl BuiltinDeriveExpander {
pub fn expand(
&self,
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -141,7 +141,7 @@ fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResu
ExpandResult::ok(expanded)
}
fn find_builtin_crate(db: &dyn AstDatabase, id: MacroCallId) -> tt::TokenTree {
fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree {
// FIXME: make hygiene work for builtin derive macros
// such that $crate can be used here.
let cg = db.crate_graph();
@ -158,7 +158,7 @@ fn find_builtin_crate(db: &dyn AstDatabase, id: MacroCallId) -> tt::TokenTree {
}
fn copy_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -167,7 +167,7 @@ fn copy_expand(
}
fn clone_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -176,7 +176,7 @@ fn clone_expand(
}
fn default_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -185,7 +185,7 @@ fn default_expand(
}
fn debug_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -194,7 +194,7 @@ fn debug_expand(
}
fn hash_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -202,13 +202,17 @@ fn hash_expand(
expand_simple_derive(tt, quote! { #krate::hash::Hash })
}
fn eq_expand(db: &dyn AstDatabase, id: MacroCallId, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
fn eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, quote! { #krate::cmp::Eq })
}
fn partial_eq_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -217,7 +221,7 @@ fn partial_eq_expand(
}
fn ord_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -226,7 +230,7 @@ fn ord_expand(
}
fn partial_ord_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {


@ -10,7 +10,7 @@ use syntax::{
};
use crate::{
db::AstDatabase, name, quote, tt, ExpandError, ExpandResult, MacroCallId, MacroCallLoc,
db::ExpandDatabase, name, quote, tt, ExpandError, ExpandResult, MacroCallId, MacroCallLoc,
};
macro_rules! register_builtin {
@ -28,7 +28,7 @@ macro_rules! register_builtin {
impl BuiltinFnLikeExpander {
pub fn expand(
&self,
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -42,7 +42,7 @@ macro_rules! register_builtin {
impl EagerExpander {
pub fn expand(
&self,
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@ -121,7 +121,7 @@ const DOLLAR_CRATE: tt::Ident =
tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
fn module_path_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -130,7 +130,7 @@ fn module_path_expand(
}
fn line_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -144,7 +144,7 @@ fn line_expand(
}
fn log_syntax_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -152,7 +152,7 @@ fn log_syntax_expand(
}
fn trace_macros_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -160,7 +160,7 @@ fn trace_macros_expand(
}
fn stringify_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -174,7 +174,7 @@ fn stringify_expand(
}
fn column_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -188,7 +188,7 @@ fn column_expand(
}
fn assert_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -218,7 +218,7 @@ fn assert_expand(
}
fn file_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -234,7 +234,7 @@ fn file_expand(
}
fn format_args_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -276,7 +276,7 @@ fn format_args_expand(
}
fn asm_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -304,7 +304,7 @@ fn asm_expand(
}
fn global_asm_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -313,7 +313,7 @@ fn global_asm_expand(
}
fn cfg_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -325,7 +325,7 @@ fn cfg_expand(
}
fn panic_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -343,7 +343,7 @@ fn panic_expand(
}
fn unreachable_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -379,7 +379,7 @@ fn unquote_byte_string(lit: &tt::Literal) -> Option<Vec<u8>> {
}
fn compile_error_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@ -395,7 +395,7 @@ fn compile_error_expand(
}
fn concat_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@ -441,7 +441,7 @@ fn concat_expand(
}
fn concat_bytes_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@ -507,7 +507,7 @@ fn concat_bytes_expand_subtree(
}
fn concat_idents_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@ -529,7 +529,7 @@ fn concat_idents_expand(
}
fn relative_file(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
call_id: MacroCallId,
path_str: &str,
allow_recursion: bool,
@ -558,7 +558,7 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
}
fn include_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@ -583,7 +583,7 @@ fn include_expand(
}
fn include_bytes_expand(
_db: &dyn AstDatabase,
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@ -606,7 +606,7 @@ fn include_bytes_expand(
}
fn include_str_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@ -637,13 +637,13 @@ fn include_str_expand(
ExpandResult::ok(ExpandedEager::new(quote!(#text)))
}
fn get_env_inner(db: &dyn AstDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
let krate = db.lookup_intern_macro_call(arg_id).krate;
db.crate_graph()[krate].env.get(key)
}
fn env_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {
@ -679,7 +679,7 @@ fn env_expand(
}
fn option_env_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<ExpandedEager> {


@ -44,7 +44,7 @@ pub enum TokenExpander {
impl TokenExpander {
fn expand(
&self,
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
@ -83,9 +83,8 @@ impl TokenExpander {
}
}
// FIXME: rename to ExpandDatabase
#[salsa::query_group(AstDatabaseStorage)]
pub trait AstDatabase: SourceDatabase {
#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
/// Main public API -- parses a hir file, not caring whether it's a real
@ -138,7 +137,7 @@ pub trait AstDatabase: SourceDatabase {
/// token. The `token_to_map` mapped down into the expansion, with the mapped
/// token returned.
pub fn expand_speculative(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
actual_macro_call: MacroCallId,
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
@ -211,7 +210,7 @@ pub fn expand_speculative(
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
tt.delimiter = tt::Delimiter::unspecified();
expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
@ -236,12 +235,12 @@ pub fn expand_speculative(
Some((node.syntax_node(), token))
}
fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default();
Arc::new(map)
}
fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
HirFileIdRepr::MacroFile(macro_file) => {
@ -253,13 +252,13 @@ fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNod
}
fn parse_macro_expansion(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
macro_file: MacroFile,
) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
let _p = profile::span("parse_macro_expansion");
let result = db.macro_expand(macro_file.macro_call_id);
let mbe::ValueResult { value, err } = db.macro_expand(macro_file.macro_call_id);
if let Some(err) = &result.err {
if let Some(err) = &err {
// Note:
// The end goal is to make every parse_macro call succeed, so that the
// log below is never reached.
@ -280,9 +279,9 @@ fn parse_macro_expansion(
parents
);
}
let tt = match result.value {
let tt = match value {
Some(tt) => tt,
None => return ExpandResult { value: None, err: result.err },
None => return ExpandResult { value: None, err },
};
let expand_to = macro_expand_to(db, macro_file.macro_call_id);
@ -292,11 +291,11 @@ fn parse_macro_expansion(
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: result.err }
ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err }
}
fn macro_arg(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
let arg = db.macro_arg_text(id)?;
@ -357,7 +356,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default()
}
fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> {
let loc = db.lookup_intern_macro_call(id);
let arg = loc.kind.arg(db)?;
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
@ -380,7 +379,10 @@ fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
Some(arg.green().into())
}
fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError> {
fn macro_def(
db: &dyn ExpandDatabase,
id: MacroDefId,
) -> Result<Arc<TokenExpander>, mbe::ParseError> {
match id.kind {
MacroDefKind::Declarative(ast_id) => {
let (mac, def_site_token_map) = match ast_id.to_node(db) {
@ -419,7 +421,10 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Result<Arc<TokenExpander>,
}
}
fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> {
fn macro_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> ExpandResult<Option<Arc<tt::Subtree>>> {
let _p = profile::span("macro_expand");
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
if let Some(eager) = &loc.eager {
@ -469,11 +474,11 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Ar
ExpandResult { value: Some(Arc::new(tt)), err }
}
fn macro_expand_error(db: &dyn AstDatabase, macro_call: MacroCallId) -> Option<ExpandError> {
fn macro_expand_error(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> Option<ExpandError> {
db.macro_expand(macro_call).err
}
fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
let macro_arg = match db.macro_arg(id) {
Some(it) => it,
@ -499,14 +504,14 @@ fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<tt::
_ => None,
};
expander.expand(db, loc.krate, &macro_arg.0, attr_arg.as_ref())
expander.expand(db, loc.def.krate, loc.krate, &macro_arg.0, attr_arg.as_ref())
}
fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
Arc::new(HygieneFrame::new(db, file_id))
}
fn macro_expand_to(db: &dyn AstDatabase, id: MacroCallId) -> ExpandTo {
fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
loc.kind.expand_to()
}


@ -25,7 +25,7 @@ use syntax::{ted, SyntaxNode};
use crate::{
ast::{self, AstNode},
db::AstDatabase,
db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
@ -96,7 +96,7 @@ impl ErrorSink for &'_ mut dyn FnMut(ExpandError) {
}
pub fn expand_eager_macro(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
krate: CrateId,
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
@ -172,7 +172,7 @@ fn to_subtree(node: &SyntaxNode) -> crate::tt::Subtree {
}
fn lazy_expand(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
@ -193,7 +193,7 @@ fn lazy_expand(
}
fn eager_macro_recur(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
hygiene: &Hygiene,
curr: InFile<SyntaxNode>,
krate: CrateId,


@ -636,9 +636,8 @@ fn foo() {
if {}
}
"#,
// the {} gets parsed as the condition, I think?
expect![[r#"
fn foo () {if {} {}}
fn foo () {if __ra_fixup {} {}}
"#]],
)
}


@ -14,7 +14,7 @@ use syntax::{
};
use crate::{
db::{self, AstDatabase},
db::{self, ExpandDatabase},
fixup,
name::{AsName, Name},
HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
@ -26,7 +26,7 @@ pub struct Hygiene {
}
impl Hygiene {
pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene {
pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene {
Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
}
@ -37,7 +37,7 @@ impl Hygiene {
// FIXME: this should just return name
pub fn name_ref_to_name(
&self,
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
name_ref: ast::NameRef,
) -> Either<Name, CrateId> {
if let Some(frames) = &self.frames {
@ -51,7 +51,7 @@ impl Hygiene {
Either::Left(name_ref.as_name())
}
pub fn local_inner_macros(&self, db: &dyn AstDatabase, path: ast::Path) -> Option<CrateId> {
pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
let mut token = path.syntax().first_token()?.text_range();
let frames = self.frames.as_ref()?;
let mut current = &frames.0;
@ -87,13 +87,13 @@ pub struct HygieneFrame {
}
impl HygieneFrames {
fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Self {
fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
// Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
// usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
}
fn root_crate(&self, db: &dyn AstDatabase, node: &SyntaxNode) -> Option<CrateId> {
fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
let mut token = node.first_token()?.text_range();
let mut result = self.0.krate;
let mut current = self.0.clone();
@ -136,7 +136,7 @@ struct HygieneInfo {
impl HygieneInfo {
fn map_ident_up(
&self,
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
token: TextRange,
) -> Option<(InFile<TextRange>, Origin)> {
let token_id = self.exp_map.token_by_range(token)?;
@ -175,7 +175,7 @@ impl HygieneInfo {
}
fn make_hygiene_info(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
macro_file: MacroFile,
loc: &MacroCallLoc,
) -> Option<HygieneInfo> {
@ -215,7 +215,7 @@ fn make_hygiene_info(
}
impl HygieneFrame {
pub(crate) fn new(db: &dyn AstDatabase, file_id: HirFileId) -> HygieneFrame {
pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
let (info, krate, local_inner) = match file_id.macro_file() {
None => (None, None, false),
Some(macro_file) => {


@ -198,7 +198,7 @@ impl HirFileId {
/// For macro-expansion files, returns the original source file the
/// expansion originated from.
pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId {
pub fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
let mut file_id = self;
loop {
match file_id.repr() {
@ -214,7 +214,7 @@ impl HirFileId {
}
}
pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 {
pub fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
let mut level = 0;
let mut curr = self;
while let Some(macro_file) = curr.macro_file() {
@ -227,14 +227,14 @@ impl HirFileId {
}
/// If this is a macro call, returns the syntax node of the call.
pub fn call_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
Some(loc.kind.to_node(db))
}
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
pub fn original_call_node(self, db: &dyn db::AstDatabase) -> Option<(FileId, SyntaxNode)> {
pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> {
let mut call =
db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).kind.to_node(db);
loop {
@ -248,7 +248,7 @@ impl HirFileId {
}
/// Return expansion information if it is a macro-expansion file
pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option<ExpansionInfo> {
pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@ -294,7 +294,7 @@ impl HirFileId {
}
/// Indicate it is macro file generated for builtin derive
pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option<InFile<ast::Attr>> {
pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let attr = match loc.def.kind {
@ -304,7 +304,7 @@ impl HirFileId {
Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
}
pub fn is_custom_derive(&self, db: &dyn db::AstDatabase) -> bool {
pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@ -315,7 +315,7 @@ impl HirFileId {
}
/// Return whether this file is an include macro
pub fn is_include_macro(&self, db: &dyn db::AstDatabase) -> bool {
pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@ -326,7 +326,7 @@ impl HirFileId {
}
/// Return whether this file is an attr macro
pub fn is_attr_macro(&self, db: &dyn db::AstDatabase) -> bool {
pub fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@ -338,7 +338,7 @@ impl HirFileId {
/// Return whether this file is the pseudo expansion of the derive attribute.
/// See [`crate::builtin_attr_macro::derive_attr_expand`].
pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::AstDatabase) -> bool {
pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@ -384,7 +384,7 @@ impl HirFileId {
impl MacroDefId {
pub fn as_lazy_macro(
self,
db: &dyn db::AstDatabase,
db: &dyn db::ExpandDatabase,
krate: CrateId,
kind: MacroCallKind,
) -> MacroCallId {
@ -427,7 +427,7 @@ impl MacroCallKind {
}
}
pub fn to_node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> {
match self {
MacroCallKind::FnLike { ast_id, .. } => {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
@ -465,7 +465,7 @@ impl MacroCallKind {
/// Returns the original file range that best describes the location of this macro call.
///
/// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
pub fn original_call_range_with_body(self, db: &dyn db::AstDatabase) -> FileRange {
pub fn original_call_range_with_body(self, db: &dyn db::ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id().repr() {
@ -490,7 +490,7 @@ impl MacroCallKind {
/// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
/// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
/// get only the specific derive that is being referred to.
pub fn original_call_range(self, db: &dyn db::AstDatabase) -> FileRange {
pub fn original_call_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
let mut kind = self;
let file_id = loop {
match kind.file_id().repr() {
@ -529,7 +529,7 @@ impl MacroCallKind {
FileRange { range, file_id }
}
fn arg(&self, db: &dyn db::AstDatabase) -> Option<SyntaxNode> {
fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<SyntaxNode> {
match self {
MacroCallKind::FnLike { ast_id, .. } => {
Some(ast_id.to_node(db).token_tree()?.syntax().clone())
@ -597,7 +597,7 @@ impl ExpansionInfo {
/// Both of these only have one simple call site input so no special handling is required here.
pub fn map_token_down(
&self,
db: &dyn db::AstDatabase,
db: &dyn db::ExpandDatabase,
item: Option<ast::Item>,
token: InFile<&SyntaxToken>,
) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
@ -666,7 +666,7 @@ impl ExpansionInfo {
/// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.
pub fn map_token_up(
&self,
db: &dyn db::AstDatabase,
db: &dyn db::ExpandDatabase,
token: InFile<&SyntaxToken>,
) -> Option<(InFile<SyntaxToken>, Origin)> {
// Fetch the id through its text range,
@ -717,7 +717,7 @@ impl ExpansionInfo {
pub type AstId<N> = InFile<FileAstId<N>>;
impl<N: AstNode> AstId<N> {
pub fn to_node(&self, db: &dyn db::AstDatabase) -> N {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
let root = db.parse_or_expand(self.file_id).unwrap();
db.ast_id_map(self.file_id).get(self.value).to_node(&root)
}
@ -753,7 +753,7 @@ impl<T> InFile<T> {
self.with_value(&self.value)
}
pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode {
pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_or_expand(self.file_id).expect("source created from invalid file")
}
}
@ -783,7 +783,7 @@ impl<L, R> InFile<Either<L, R>> {
impl<'a> InFile<&'a SyntaxNode> {
pub fn ancestors_with_macros(
self,
db: &dyn db::AstDatabase,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
@ -794,7 +794,7 @@ impl<'a> InFile<&'a SyntaxNode> {
/// Skips the attributed item that caused the macro invocation we are climbing up
pub fn ancestors_with_macros_skip_attr_item(
self,
db: &dyn db::AstDatabase,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
@ -816,7 +816,7 @@ impl<'a> InFile<&'a SyntaxNode> {
///
/// For attributes and derives, this will point back to the attribute only.
/// For the entire item use [`InFile::original_file_range_full`].
pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
@ -831,7 +831,7 @@ impl<'a> InFile<&'a SyntaxNode> {
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range_full(self, db: &dyn db::AstDatabase) -> FileRange {
pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
@ -846,7 +846,7 @@ impl<'a> InFile<&'a SyntaxNode> {
}
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option<FileRange> {
pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
match ascend_node_border_tokens(db, self) {
Some(InFile { file_id, value: (first, last) }) => {
let original_file = file_id.original_file(db);
@ -865,7 +865,7 @@ impl<'a> InFile<&'a SyntaxNode> {
}
}
pub fn original_syntax_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
pub fn original_syntax_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
if !self.file_id.is_macro() {
@ -892,13 +892,13 @@ impl<'a> InFile<&'a SyntaxNode> {
}
impl InFile<SyntaxToken> {
pub fn upmap(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxToken>> {
pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxToken>> {
let expansion = self.file_id.expansion_info(db)?;
expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it)
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
@ -913,7 +913,7 @@ impl InFile<SyntaxToken> {
}
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option<FileRange> {
pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some(FileRange { file_id, range: self.value.text_range() })
@ -932,7 +932,7 @@ impl InFile<SyntaxToken> {
pub fn ancestors_with_macros(
self,
db: &dyn db::AstDatabase,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
self.value.parent().into_iter().flat_map({
let file_id = self.file_id;
@ -942,7 +942,7 @@ impl InFile<SyntaxToken> {
}
fn ascend_node_border_tokens(
db: &dyn db::AstDatabase,
db: &dyn db::ExpandDatabase,
InFile { file_id, value: node }: InFile<&SyntaxNode>,
) -> Option<InFile<(SyntaxToken, SyntaxToken)>> {
let expansion = file_id.expansion_info(db)?;
@ -958,7 +958,7 @@ fn ascend_node_border_tokens(
}
fn ascend_call_token(
db: &dyn db::AstDatabase,
db: &dyn db::ExpandDatabase,
expansion: &ExpansionInfo,
token: InFile<SyntaxToken>,
) -> Option<InFile<SyntaxToken>> {
@ -977,7 +977,7 @@ impl<N: AstNode> InFile<N> {
self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
}
pub fn original_ast_node(self, db: &dyn db::AstDatabase) -> Option<InFile<N>> {
pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
if !self.file_id.is_macro() {


@ -6,7 +6,7 @@ use std::{
};
use crate::{
db::AstDatabase,
db::ExpandDatabase,
hygiene::Hygiene,
name::{known, Name},
};
@ -37,7 +37,11 @@ pub enum PathKind {
}
impl ModPath {
pub fn from_src(db: &dyn AstDatabase, path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> {
pub fn from_src(
db: &dyn ExpandDatabase,
path: ast::Path,
hygiene: &Hygiene,
) -> Option<ModPath> {
convert_path(db, None, path, hygiene)
}
@ -162,7 +166,7 @@ impl From<Name> for ModPath {
}
fn convert_path(
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
prefix: Option<ModPath>,
path: ast::Path,
hygiene: &Hygiene,


@ -3,22 +3,20 @@
use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use stdx::never;
use crate::{db::AstDatabase, tt, ExpandError, ExpandResult};
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct ProcMacroExpander {
krate: CrateId,
proc_macro_id: Option<ProcMacroId>,
}
impl ProcMacroExpander {
pub fn new(krate: CrateId, proc_macro_id: ProcMacroId) -> Self {
Self { krate, proc_macro_id: Some(proc_macro_id) }
pub fn new(proc_macro_id: ProcMacroId) -> Self {
Self { proc_macro_id: Some(proc_macro_id) }
}
pub fn dummy(krate: CrateId) -> Self {
// FIXME: Should store the name for better errors
Self { krate, proc_macro_id: None }
pub fn dummy() -> Self {
Self { proc_macro_id: None }
}
pub fn is_dummy(&self) -> bool {
@ -27,7 +25,8 @@ impl ProcMacroExpander {
pub fn expand(
self,
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
def_crate: CrateId,
calling_crate: CrateId,
tt: &tt::Subtree,
attr_arg: Option<&tt::Subtree>,
@ -35,7 +34,7 @@ impl ProcMacroExpander {
match self.proc_macro_id {
Some(id) => {
let krate_graph = db.crate_graph();
let proc_macros = match &krate_graph[self.krate].proc_macro {
let proc_macros = match &krate_graph[def_crate].proc_macro {
Ok(proc_macros) => proc_macros,
Err(_) => {
never!("Non-dummy expander even though there are no proc macros");
@ -84,7 +83,7 @@ impl ProcMacroExpander {
}
None => ExpandResult::with_err(
tt::Subtree::empty(),
ExpandError::UnresolvedProcMacro(self.krate),
ExpandError::UnresolvedProcMacro(def_crate),
),
}
}
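
The expander is now resolved in the crate that defines the proc macro (`def_crate`) rather than via a crate id stored in the expander itself, while the calling crate is still threaded through for expansion context. A toy model of the lookup shape (all names here are stand-ins, not rust-analyzer's API):

use std::collections::HashMap;

type CrateId = u32;
type ProcMacroId = u32;

struct CrateGraph {
    // Proc-macro expanders live with their *defining* crate.
    proc_macros: HashMap<CrateId, Vec<&'static str>>,
}

fn expand(graph: &CrateGraph, def_crate: CrateId, id: ProcMacroId) -> Option<&'static str> {
    // Resolve in the defining crate, not the caller.
    graph.proc_macros.get(&def_crate)?.get(id as usize).copied()
}

fn main() {
    let graph = CrateGraph { proc_macros: HashMap::from([(0, vec!["derive_debug"])]) };
    assert_eq!(expand(&graph, 0, 0), Some("derive_debug"));
}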


@ -22,10 +22,10 @@ either = "1.7.0"
tracing = "0.1.35"
rustc-hash = "1.1.0"
scoped-tls = "1.0.0"
chalk-solve = { version = "0.88.0", default-features = false }
chalk-ir = "0.88.0"
chalk-recursive = { version = "0.88.0", default-features = false }
chalk-derive = "0.88.0"
chalk-solve = { version = "0.89.0", default-features = false }
chalk-ir = "0.89.0"
chalk-recursive = { version = "0.89.0", default-features = false }
chalk-derive = "0.89.0"
la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
once_cell = "1.17.0"
typed-arena = "2.0.1"


@ -12,7 +12,7 @@ use hir_def::{
use crate::{
db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
from_placeholder_idx, to_chalk_trait_id, utils::generics, AdtId, AliasEq, AliasTy, Binders,
CallableDefId, CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy,
CallableDefId, CallableSig, DynTy, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy,
QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause,
};
@ -378,6 +378,19 @@ impl ProjectionTyExt for ProjectionTy {
}
}
pub trait DynTyExt {
fn principal(&self) -> Option<&TraitRef>;
}
impl DynTyExt for DynTy {
fn principal(&self) -> Option<&TraitRef> {
self.bounds.skip_binders().interned().get(0).and_then(|b| match b.skip_binders() {
crate::WhereClause::Implemented(trait_ref) => Some(trait_ref),
_ => None,
})
}
}
pub trait TraitRefExt {
fn hir_trait_id(&self) -> TraitId;
}
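
A self-contained sketch of what `principal` computes, over toy types rather than chalk's: the principal trait of a `dyn` type is the first `Implemented` bound in its bound list, if any.

```rust
#[derive(Debug, PartialEq)]
struct TraitRef(&'static str);

enum WhereClause {
    Implemented(TraitRef),
    Lifetime(&'static str),
}

// Mirrors the lookup above: peek at bound 0 and keep it only if it is an
// `Implemented` clause.
fn principal(bounds: &[WhereClause]) -> Option<&TraitRef> {
    bounds.first().and_then(|b| match b {
        WhereClause::Implemented(tr) => Some(tr),
        _ => None,
    })
}

fn main() {
    // dyn Error + 'static
    let bounds = [
        WhereClause::Implemented(TraitRef("Error")),
        WhereClause::Lifetime("'static"),
    ];
    assert_eq!(principal(&bounds), Some(&TraitRef("Error")));
}
```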

View file

@ -11,3 +11,9 @@ pub use crate::diagnostics::{
},
unsafe_check::{missing_unsafe, unsafe_expressions, UnsafeExpr},
};
#[derive(Debug, PartialEq, Eq)]
pub struct IncoherentImpl {
pub file_id: hir_expand::HirFileId,
pub impl_: syntax::AstPtr<syntax::ast::Impl>,
}

View file

@ -275,7 +275,23 @@ impl<'a> InferenceContext<'a> {
Some(type_ref) => self.make_ty(type_ref),
None => self.table.new_type_var(),
};
sig_tys.push(ret_ty.clone());
if let ClosureKind::Async = closure_kind {
// Use the first type parameter as the output type of the future.
// existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
let impl_trait_id =
crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
sig_tys.push(
TyKind::OpaqueType(
opaque_ty_id,
Substitution::from1(Interner, ret_ty.clone()),
)
.intern(Interner),
);
} else {
sig_tys.push(ret_ty.clone());
}
let sig_ty = TyKind::Function(FnPointer {
num_binders: 0,
sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
@ -286,33 +302,38 @@ impl<'a> InferenceContext<'a> {
})
.intern(Interner);
let (ty, resume_yield_tys) = if matches!(closure_kind, ClosureKind::Generator(_)) {
// FIXME: report error when there are more than 1 parameter.
let resume_ty = match sig_tys.first() {
// When `sig_tys.len() == 1` the first type is the return type, not the
// first parameter type.
Some(ty) if sig_tys.len() > 1 => ty.clone(),
_ => self.result.standard_types.unit.clone(),
};
let yield_ty = self.table.new_type_var();
let (ty, resume_yield_tys) = match closure_kind {
ClosureKind::Generator(_) => {
// FIXME: report error when there are more than 1 parameter.
let resume_ty = match sig_tys.first() {
// When `sig_tys.len() == 1` the first type is the return type, not the
// first parameter type.
Some(ty) if sig_tys.len() > 1 => ty.clone(),
_ => self.result.standard_types.unit.clone(),
};
let yield_ty = self.table.new_type_var();
let subst = TyBuilder::subst_for_generator(self.db, self.owner)
.push(resume_ty.clone())
.push(yield_ty.clone())
.push(ret_ty.clone())
.build();
let subst = TyBuilder::subst_for_generator(self.db, self.owner)
.push(resume_ty.clone())
.push(yield_ty.clone())
.push(ret_ty.clone())
.build();
let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into();
let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner);
let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into();
let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner);
(generator_ty, Some((resume_ty, yield_ty)))
} else {
let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
let closure_ty =
TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone()))
.intern(Interner);
(generator_ty, Some((resume_ty, yield_ty)))
}
ClosureKind::Closure | ClosureKind::Async => {
let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
let closure_ty = TyKind::Closure(
closure_id,
Substitution::from1(Interner, sig_ty.clone()),
)
.intern(Interner);
(closure_ty, None)
(closure_ty, None)
}
};
// Eagerly try to relate the closure type with the expected
@ -321,7 +342,7 @@ impl<'a> InferenceContext<'a> {
self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected);
// Now go through the argument patterns
for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
for (arg_pat, arg_ty) in args.iter().zip(&sig_tys) {
self.infer_top_pat(*arg_pat, &arg_ty);
}
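
In surface Rust, the new rule gives an async closure a signature whose return slot is an opaque future, so only awaiting the call's result yields the body's type. A sketch (nightly-only at the time, under `#![feature(async_closure)]`):

```rust
#![feature(async_closure)]

async fn demo() -> i32 {
    let f = async move |x: i32| x + 42;
    // `f` is typed roughly as `|i32| -> impl Future<Output = i32>`.
    let fut = f(4); // impl Future<Output = i32>
    fut.await       // i32
}

fn main() {
    let _ = demo(); // building the future is enough for type checking
}
```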

View file

@ -5,10 +5,7 @@ use std::iter::repeat_with;
use chalk_ir::Mutability;
use hir_def::{
body::Body,
expr::{
Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId,
RecordFieldPat,
},
expr::{Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId},
path::Path,
};
use hir_expand::name::Name;
@ -439,38 +436,8 @@ fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool {
let mut res = false;
walk_pats(body, pat_id, &mut |pat| {
body.walk_pats(pat_id, &mut |pat| {
res |= matches!(pat, Pat::Bind { id, .. } if body.bindings[*id].mode == BindingAnnotation::Ref);
});
res
}
fn walk_pats(body: &Body, pat_id: PatId, f: &mut impl FnMut(&Pat)) {
let pat = &body[pat_id];
f(pat);
match pat {
Pat::Range { .. }
| Pat::Lit(..)
| Pat::Path(..)
| Pat::ConstBlock(..)
| Pat::Wild
| Pat::Missing => {}
&Pat::Bind { subpat, .. } => {
if let Some(subpat) = subpat {
walk_pats(body, subpat, f);
}
}
Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => {
args.iter().copied().for_each(|p| walk_pats(body, p, f));
}
Pat::Ref { pat, .. } => walk_pats(body, *pat, f),
Pat::Slice { prefix, slice, suffix } => {
let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter());
total_iter.copied().for_each(|p| walk_pats(body, p, f));
}
Pat::Record { args, .. } => {
args.iter().for_each(|RecordFieldPat { pat, .. }| walk_pats(body, *pat, f));
}
Pat::Box { inner } => walk_pats(body, *inner, f),
}
}

View file

@ -19,13 +19,13 @@ use stdx::never;
use crate::{
autoderef::{self, AutoderefKind},
db::HirDatabase,
from_foreign_def_id,
from_chalk_trait_id, from_foreign_def_id,
infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast},
primitive::{FloatTy, IntTy, UintTy},
static_lifetime, to_chalk_trait_id,
utils::all_super_traits,
AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner,
Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, InEnvironment,
Interner, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
};
/// This is used as a key for indexing impls.
@ -266,11 +266,12 @@ impl TraitImpls {
#[derive(Debug, Eq, PartialEq)]
pub struct InherentImpls {
map: FxHashMap<TyFingerprint, Vec<ImplId>>,
invalid_impls: Vec<ImplId>,
}
impl InherentImpls {
pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
let mut impls = Self { map: FxHashMap::default() };
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let crate_def_map = db.crate_def_map(krate);
impls.collect_def_map(db, &crate_def_map);
@ -283,7 +284,7 @@ impl InherentImpls {
db: &dyn HirDatabase,
block: BlockId,
) -> Option<Arc<Self>> {
let mut impls = Self { map: FxHashMap::default() };
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
if let Some(block_def_map) = db.block_def_map(block) {
impls.collect_def_map(db, &block_def_map);
impls.shrink_to_fit();
@ -306,11 +307,17 @@ impl InherentImpls {
}
let self_ty = db.impl_self_ty(impl_id);
let fp = TyFingerprint::for_inherent_impl(self_ty.skip_binders());
if let Some(fp) = fp {
self.map.entry(fp).or_default().push(impl_id);
let self_ty = self_ty.skip_binders();
match is_inherent_impl_coherent(db, def_map, &data, self_ty) {
true => {
// `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
if let Some(fp) = TyFingerprint::for_inherent_impl(self_ty) {
self.map.entry(fp).or_default().push(impl_id);
}
}
false => self.invalid_impls.push(impl_id),
}
// `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
}
// To better support custom derives, collect impls in all unnamed const items.
@ -334,6 +341,10 @@ impl InherentImpls {
pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
self.map.values().flat_map(|v| v.iter().copied())
}
pub fn invalid_impls(&self) -> &[ImplId] {
&self.invalid_impls
}
}
pub(crate) fn incoherent_inherent_impl_crates(
@ -775,6 +786,69 @@ fn find_matching_impl(
}
}
fn is_inherent_impl_coherent(
db: &dyn HirDatabase,
def_map: &DefMap,
impl_data: &ImplData,
self_ty: &Ty,
) -> bool {
let self_ty = self_ty.kind(Interner);
let impl_allowed = match self_ty {
TyKind::Tuple(_, _)
| TyKind::FnDef(_, _)
| TyKind::Array(_, _)
| TyKind::Never
| TyKind::Raw(_, _)
| TyKind::Ref(_, _, _)
| TyKind::Slice(_)
| TyKind::Str
| TyKind::Scalar(_) => def_map.is_rustc_coherence_is_core(),
&TyKind::Adt(AdtId(adt), _) => adt.module(db.upcast()).krate() == def_map.krate(),
TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
from_chalk_trait_id(trait_ref.trait_id).module(db.upcast()).krate() == def_map.krate()
}),
_ => true,
};
impl_allowed || {
let rustc_has_incoherent_inherent_impls = match self_ty {
TyKind::Tuple(_, _)
| TyKind::FnDef(_, _)
| TyKind::Array(_, _)
| TyKind::Never
| TyKind::Raw(_, _)
| TyKind::Ref(_, _, _)
| TyKind::Slice(_)
| TyKind::Str
| TyKind::Scalar(_) => true,
&TyKind::Adt(AdtId(adt), _) => match adt {
hir_def::AdtId::StructId(it) => {
db.struct_data(it).rustc_has_incoherent_inherent_impls
}
hir_def::AdtId::UnionId(it) => {
db.union_data(it).rustc_has_incoherent_inherent_impls
}
hir_def::AdtId::EnumId(it) => db.enum_data(it).rustc_has_incoherent_inherent_impls,
},
TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
db.trait_data(from_chalk_trait_id(trait_ref.trait_id))
.rustc_has_incoherent_inherent_impls
}),
_ => false,
};
rustc_has_incoherent_inherent_impls
&& !impl_data.items.is_empty()
&& impl_data.items.iter().copied().all(|assoc| match assoc {
AssocItemId::FunctionId(it) => db.function_data(it).rustc_allow_incoherent_impl,
AssocItemId::ConstId(it) => db.const_data(it).rustc_allow_incoherent_impl,
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).rustc_allow_incoherent_impl,
})
}
}
pub fn iterate_path_candidates(
ty: &Canonical<Ty>,
db: &dyn HirDatabase,
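
The rule the new check enforces, illustrated in plain Rust (assumed semantics, matching the tests later in this commit): inherent impls are only coherent on types owned by the defining crate, unless the unstable rustc attributes opt them out.

```rust
pub struct Local;

impl Local {
    // Fine: `Local` is defined in this crate.
    pub fn method(&self) {}
}

// This would now be reported as `incoherent-impl`, because the self type is
// foreign (a primitive owned by core), and there is no
// `#[rustc_allow_incoherent_impl]` on the items:
//
//     impl bool {
//         fn falsch(self) -> bool { false }
//     }

fn main() {
    Local.method();
}
```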

View file

@ -1113,7 +1113,7 @@ impl MirLowerCtx<'_> {
if matches!(mode, BindingAnnotation::Ref | BindingAnnotation::RefMut) {
binding_mode = mode;
}
self.push_storage_live(*id, current)?;
self.push_storage_live(*id, current);
self.push_assignment(
current,
target_place.into(),
@ -1327,8 +1327,9 @@ impl MirLowerCtx<'_> {
is_ty_uninhabited_from(&self.infer[expr_id], self.owner.module(self.db.upcast()), self.db)
}
/// This function pushes `StorageLive` statements for each binding in the pattern.
fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) -> Result<()> {
/// This function pushes a `StorageLive` statement for the binding, and applies changes to add `StorageDead` in
/// the appropriate places.
fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) {
// The current implementation is wrong: it adds no `StorageDead` at the end of the scope, nor before each break
// and continue. It just adds a `StorageDead` before the `StorageLive`, which is not wrong, but unneeded in
// a proper implementation. Due to this limitation, implementing a borrow checker on top of this MIR will falsely
@ -1356,7 +1357,6 @@ impl MirLowerCtx<'_> {
let l = self.result.binding_locals[b];
self.push_statement(current, StatementKind::StorageDead(l).with_span(span));
self.push_statement(current, StatementKind::StorageLive(l).with_span(span));
Ok(())
}
fn resolve_lang_item(&self, item: LangItem) -> Result<LangItemTarget> {
@ -1381,10 +1381,10 @@ impl MirLowerCtx<'_> {
if let Some(expr_id) = initializer {
let else_block;
let Some((init_place, c)) =
self.lower_expr_as_place(current, *expr_id, true)?
else {
return Ok(None);
};
self.lower_expr_as_place(current, *expr_id, true)?
else {
return Ok(None);
};
current = c;
(current, else_block) = self.pattern_match(
current,
@ -1407,6 +1407,10 @@ impl MirLowerCtx<'_> {
}
}
}
} else {
self.body.walk_bindings_in_pat(*pat, |b| {
self.push_storage_live(b, current);
});
}
}
hir_def::expr::Statement::Expr { expr, has_semi: _ } => {
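
A minimal sketch (simplified, assumed statement kinds) of the statement pair `push_storage_live` emits for a binding's local: the conservative `StorageDead` right before the `StorageLive` that the comment in the hunk above describes.

```rust
#[derive(Debug, PartialEq)]
enum StatementKind {
    StorageLive(u32),
    StorageDead(u32),
}

fn push_storage_live(block: &mut Vec<StatementKind>, local: u32) {
    // Not wrong, but unneeded in a proper implementation with scoped
    // StorageDead placement (see the comment in the hunk above).
    block.push(StatementKind::StorageDead(local));
    block.push(StatementKind::StorageLive(local));
}

fn main() {
    let mut block = Vec::new();
    push_storage_live(&mut block, 0);
    assert_eq!(block, [StatementKind::StorageDead(0), StatementKind::StorageLive(0)]);
}
```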

View file

@ -9,7 +9,7 @@ use base_db::{
salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
};
use hir_def::{db::DefDatabase, ModuleId};
use hir_expand::db::AstDatabase;
use hir_expand::db::ExpandDatabase;
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use syntax::TextRange;
use test_utils::extract_annotations;
@ -17,7 +17,7 @@ use test_utils::extract_annotations;
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceDatabaseStorage,
hir_expand::db::AstDatabaseStorage,
hir_expand::db::ExpandDatabaseStorage,
hir_def::db::InternDatabaseStorage,
hir_def::db::DefDatabaseStorage,
crate::db::HirDatabaseStorage
@ -41,8 +41,8 @@ impl fmt::Debug for TestDB {
}
}
impl Upcast<dyn AstDatabase> for TestDB {
fn upcast(&self) -> &(dyn AstDatabase + 'static) {
impl Upcast<dyn ExpandDatabase> for TestDB {
fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
&*self
}
}

View file

@ -23,7 +23,7 @@ use hir_def::{
src::HasSource,
AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId,
};
use hir_expand::{db::AstDatabase, InFile};
use hir_expand::{db::ExpandDatabase, InFile};
use once_cell::race::OnceBool;
use stdx::format_to;
use syntax::{

View file

@ -9,6 +9,7 @@ fn infer_slice_method() {
check_types(
r#"
impl<T> [T] {
#[rustc_allow_incoherent_impl]
fn foo(&self) -> T {
loop {}
}
@ -35,6 +36,7 @@ fn test() {
//- /lib.rs crate:other_crate
mod foo {
impl f32 {
#[rustc_allow_incoherent_impl]
pub fn foo(self) -> f32 { 0. }
}
}
@ -47,6 +49,7 @@ fn infer_array_inherent_impl() {
check_types(
r#"
impl<T, const N: usize> [T; N] {
#[rustc_allow_incoherent_impl]
fn foo(&self) -> T {
loop {}
}
@ -1437,6 +1440,7 @@ fn resolve_const_generic_array_methods() {
r#"
#[lang = "array"]
impl<T, const N: usize> [T; N] {
#[rustc_allow_incoherent_impl]
pub fn map<F, U>(self, f: F) -> [U; N]
where
F: FnMut(T) -> U,
@ -1445,6 +1449,7 @@ impl<T, const N: usize> [T; N] {
#[lang = "slice"]
impl<T> [T] {
#[rustc_allow_incoherent_impl]
pub fn map<F, U>(self, f: F) -> &[U]
where
F: FnMut(T) -> U,
@ -1468,6 +1473,7 @@ struct Const<const N: usize>;
#[lang = "array"]
impl<T, const N: usize> [T; N] {
#[rustc_allow_incoherent_impl]
pub fn my_map<F, U, const X: usize>(self, f: F, c: Const<X>) -> [U; X]
where
F: FnMut(T) -> U,
@ -1476,6 +1482,7 @@ impl<T, const N: usize> [T; N] {
#[lang = "slice"]
impl<T> [T] {
#[rustc_allow_incoherent_impl]
pub fn my_map<F, const X: usize, U>(self, f: F, c: Const<X>) -> &[U]
where
F: FnMut(T) -> U,
@ -1874,14 +1881,14 @@ fn incoherent_impls() {
pub struct Box<T>(T);
use core::error::Error;
#[rustc_allow_incoherent_impl]
impl dyn Error {
#[rustc_allow_incoherent_impl]
pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error>> {
loop {}
}
}
#[rustc_allow_incoherent_impl]
impl dyn Error + Send {
#[rustc_allow_incoherent_impl]
/// Attempts to downcast the box to a concrete type.
pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error + Send>> {
let err: Box<dyn Error> = self;

View file

@ -1756,3 +1756,35 @@ const C: usize = 2 + 2;
"#,
);
}
#[test]
fn regression_14164() {
check_types(
r#"
trait Rec {
type K;
type Rebind<Tok>: Rec<K = Tok>;
}
trait Expr<K> {
type Part: Rec<K = K>;
fn foo(_: <Self::Part as Rec>::Rebind<i32>) {}
}
struct Head<K>(K);
impl<K> Rec for Head<K> {
type K = K;
type Rebind<Tok> = Head<Tok>;
}
fn test<E>()
where
E: Expr<usize, Part = Head<usize>>,
{
let head;
//^^^^ Head<i32>
E::foo(head);
}
"#,
);
}

View file

@ -1116,21 +1116,22 @@ fn infer_inherent_method() {
fn infer_inherent_method_str() {
check_infer(
r#"
#[lang = "str"]
impl str {
fn foo(&self) -> i32 {}
}
#![rustc_coherence_is_core]
#[lang = "str"]
impl str {
fn foo(&self) -> i32 {}
}
fn test() {
"foo".foo();
}
"#,
fn test() {
"foo".foo();
}
"#,
expect![[r#"
39..43 'self': &str
52..54 '{}': i32
68..88 '{ ...o(); }': ()
74..79 '"foo"': &str
74..85 '"foo".foo()': i32
67..71 'self': &str
80..82 '{}': i32
96..116 '{ ...o(); }': ()
102..107 '"foo"': &str
102..113 '"foo".foo()': i32
"#]],
);
}
@ -2640,6 +2641,7 @@ impl<T> [T] {}
#[lang = "slice_alloc"]
impl<T> [T] {
#[rustc_allow_incoherent_impl]
pub fn into_vec<A: Allocator>(self: Box<Self, A>) -> Vec<T, A> {
unimplemented!()
}
@ -2655,22 +2657,22 @@ struct Astruct;
impl B for Astruct {}
"#,
expect![[r#"
569..573 'self': Box<[T], A>
602..634 '{ ... }': Vec<T, A>
648..761 '{ ...t]); }': ()
658..661 'vec': Vec<i32, Global>
664..679 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
664..691 '<[_]>:...1i32])': Vec<i32, Global>
680..690 'box [1i32]': Box<[i32; 1], Global>
684..690 '[1i32]': [i32; 1]
685..689 '1i32': i32
701..702 'v': Vec<Box<dyn B, Global>, Global>
722..739 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
722..758 '<[_]> ...ruct])': Vec<Box<dyn B, Global>, Global>
740..757 'box [b...truct]': Box<[Box<dyn B, Global>; 1], Global>
744..757 '[box Astruct]': [Box<dyn B, Global>; 1]
745..756 'box Astruct': Box<Astruct, Global>
749..756 'Astruct': Astruct
604..608 'self': Box<[T], A>
637..669 '{ ... }': Vec<T, A>
683..796 '{ ...t]); }': ()
693..696 'vec': Vec<i32, Global>
699..714 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
699..726 '<[_]>:...1i32])': Vec<i32, Global>
715..725 'box [1i32]': Box<[i32; 1], Global>
719..725 '[1i32]': [i32; 1]
720..724 '1i32': i32
736..737 'v': Vec<Box<dyn B, Global>, Global>
757..774 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
757..793 '<[_]> ...ruct])': Vec<Box<dyn B, Global>, Global>
775..792 'box [b...truct]': Box<[Box<dyn B, Global>; 1], Global>
779..792 '[box Astruct]': [Box<dyn B, Global>; 1]
780..791 'box Astruct': Box<Astruct, Global>
784..791 'Astruct': Astruct
"#]],
)
}

View file

@ -82,6 +82,46 @@ async fn test() {
);
}
#[test]
fn infer_async_closure() {
check_types(
r#"
//- minicore: future, option
async fn test() {
let f = async move |x: i32| x + 42;
f;
// ^ |i32| -> impl Future<Output = i32>
let a = f(4);
a;
// ^ impl Future<Output = i32>
let x = a.await;
x;
// ^ i32
let f = async move || 42;
f;
// ^ || -> impl Future<Output = i32>
let a = f();
a;
// ^ impl Future<Output = i32>
let x = a.await;
x;
// ^ i32
let b = ((async move || {})()).await;
b;
// ^ ()
let c = async move || {
let y = None;
y
// ^ Option<u64>
};
let _: Option<u64> = c().await;
c;
// ^ || -> impl Future<Output = Option<u64>>
}
"#,
);
}
#[test]
fn auto_sized_async_block() {
check_no_mismatches(
@ -493,29 +533,30 @@ fn tuple_struct_with_fn() {
r#"
struct S(fn(u32) -> u64);
fn test() -> u64 {
let a = S(|i| 2*i);
let a = S(|i| 2*i as u64);
let b = a.0(4);
a.0(2)
}"#,
expect![[r#"
43..101 '{ ...0(2) }': u64
43..108 '{ ...0(2) }': u64
53..54 'a': S
57..58 'S': S(fn(u32) -> u64) -> S
57..67 'S(|i| 2*i)': S
59..66 '|i| 2*i': |u32| -> u64
57..74 'S(|i| ...s u64)': S
59..73 '|i| 2*i as u64': |u32| -> u64
60..61 'i': u32
63..64 '2': u32
63..66 '2*i': u32
63..64 '2': u64
63..73 '2*i as u64': u64
65..66 'i': u32
77..78 'b': u64
81..82 'a': S
81..84 'a.0': fn(u32) -> u64
81..87 'a.0(4)': u64
85..86 '4': u32
93..94 'a': S
93..96 'a.0': fn(u32) -> u64
93..99 'a.0(2)': u64
97..98 '2': u32
65..73 'i as u64': u64
84..85 'b': u64
88..89 'a': S
88..91 'a.0': fn(u32) -> u64
88..94 'a.0(4)': u64
92..93 '4': u32
100..101 'a': S
100..103 'a.0': fn(u32) -> u64
100..106 'a.0(2)': u64
104..105 '2': u32
"#]],
);
}

View file

@ -5,7 +5,7 @@
//! But we need this for at least LRU caching at the query level.
pub use hir_def::db::*;
pub use hir_expand::db::{
AstDatabase, AstDatabaseStorage, AstIdMapQuery, ExpandProcMacroQuery, HygieneFrameQuery,
AstIdMapQuery, ExpandDatabase, ExpandDatabaseStorage, ExpandProcMacroQuery, HygieneFrameQuery,
InternMacroCallQuery, MacroArgTextQuery, MacroDefQuery, MacroExpandErrorQuery,
MacroExpandQuery, ParseMacroExpansionQuery,
};

View file

@ -3,6 +3,8 @@
//!
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
pub use hir_ty::diagnostics::{IncoherentImpl, IncorrectCase};
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
@ -35,6 +37,7 @@ diagnostics![
InactiveCode,
IncorrectCase,
InvalidDeriveTarget,
IncoherentImpl,
MacroError,
MalformedDerive,
MismatchedArgCount,
@ -220,5 +223,3 @@ pub struct NeedMut {
pub struct UnusedMut {
pub local: Local,
}
pub use hir_ty::diagnostics::IncorrectCase;

View file

@ -85,10 +85,10 @@ use crate::db::{DefDatabase, HirDatabase};
pub use crate::{
attrs::{HasAttrs, Namespace},
diagnostics::{
AnyDiagnostic, BreakOutsideOfLoop, ExpectedFunction, InactiveCode, IncorrectCase,
InvalidDeriveTarget, MacroError, MalformedDerive, MismatchedArgCount, MissingFields,
MissingMatchArms, MissingUnsafe, NeedMut, NoSuchField, PrivateAssocItem, PrivateField,
ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro,
AnyDiagnostic, BreakOutsideOfLoop, ExpectedFunction, InactiveCode, IncoherentImpl,
IncorrectCase, InvalidDeriveTarget, MacroError, MalformedDerive, MismatchedArgCount,
MissingFields, MissingMatchArms, MissingUnsafe, NeedMut, NoSuchField, PrivateAssocItem,
PrivateField, ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro,
UnresolvedExternCrate, UnresolvedField, UnresolvedImport, UnresolvedMacroCall,
UnresolvedMethodCall, UnresolvedModule, UnresolvedProcMacro, UnusedMut,
},
@ -604,11 +604,23 @@ impl Module {
}
}
let inherent_impls = db.inherent_impls_in_crate(self.id.krate());
for impl_def in self.impl_defs(db) {
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
if inherent_impls.invalid_impls().contains(&impl_def.id) {
let loc = impl_def.id.lookup(db.upcast());
let tree = loc.id.item_tree(db.upcast());
let node = &tree[loc.id.value];
let file_id = loc.id.file_id();
let ast_id_map = db.ast_id_map(file_id);
acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
}
for item in impl_def.items(db) {
let def: DefWithBody = match item {
AssocItem::Function(it) => it.into(),
@ -3210,6 +3222,14 @@ impl Type {
matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize)))
}
pub fn is_float(&self) -> bool {
matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Float(_)))
}
pub fn is_char(&self) -> bool {
matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Char))
}
pub fn is_int_or_uint(&self) -> bool {
match self.ty.kind(Interner) {
TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)) => true,
@ -3224,6 +3244,13 @@ impl Type {
}
}
pub fn as_slice(&self) -> Option<Type> {
match &self.ty.kind(Interner) {
TyKind::Slice(ty) => Some(self.derived(ty.clone())),
_ => None,
}
}
pub fn strip_references(&self) -> Type {
self.derived(self.ty.strip_references().clone())
}

View file

@ -15,7 +15,7 @@ use hir_def::{
AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
};
use hir_expand::{
db::AstDatabase,
db::ExpandDatabase,
name::{known, AsName},
ExpansionInfo, MacroCallId,
};
@ -411,7 +411,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_record_field(field)
}
pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
self.imp.resolve_record_pat_field(field)
}
@ -1201,7 +1201,7 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(field.syntax())?.resolve_record_field(self.db, field)
}
fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
}
@ -1536,7 +1536,7 @@ impl<'db> SemanticsImpl<'db> {
fn macro_call_to_macro_id(
ctx: &mut SourceToDefCtx<'_, '_>,
db: &dyn AstDatabase,
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> Option<MacroId> {
let loc = db.lookup_intern_macro_call(macro_call_id);

View file

@ -441,14 +441,17 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
field: &ast::RecordPatField,
) -> Option<Field> {
) -> Option<(Field, Type)> {
let field_name = field.field_name()?.as_name();
let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
let pat_id = self.pat_id(&record_pat.into())?;
let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?;
let variant_data = variant.variant_data(db.upcast());
let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
Some(field.into())
let (_, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id)?.as_adt()?;
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
Some((field.into(), Type::new_with_resolver(db, &self.resolver, field_ty)))
}
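
The change means resolving a record-pattern field now also yields the field's type with the pattern's generic arguments substituted in. A toy model of that substitution (hypothetical `Ty`, not rust-analyzer's):

```rust
#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Param(usize), // the N-th generic parameter of the ADT
    I32,
}

// Substitute the ADT's generic arguments into a field's declared type.
fn substitute(field_ty: &Ty, subst: &[Ty]) -> Ty {
    match field_ty {
        Ty::Param(i) => subst[*i].clone(),
        other => other.clone(),
    }
}

fn main() {
    // struct Foo<T> { x: T } matched against a value of type Foo<i32>:
    // the field `x` resolves to i32, not T.
    assert_eq!(substitute(&Ty::Param(0), &[Ty::I32]), Ty::I32);
}
```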
pub(crate) fn resolve_macro_call(

View file

@ -1027,7 +1027,7 @@ fn next_space_for_fn_after_call_site(expr: ast::CallableExpr) -> Option<Generate
}
fn next_space_for_fn_in_module(
db: &dyn hir::db::AstDatabase,
db: &dyn hir::db::ExpandDatabase,
module_source: &hir::InFile<hir::ModuleSource>,
) -> Option<(FileId, GeneratedFunctionTarget)> {
let file = module_source.file_id.original_file(db);

View file

@ -363,10 +363,10 @@ fn inline(
.collect();
if function.self_param(sema.db).is_some() {
let this = || make::name_ref("this").syntax().clone_for_update();
let this = || make::name_ref("this").syntax().clone_for_update().first_token().unwrap();
if let Some(self_local) = params[0].2.as_local(sema.db) {
usages_for_locals(self_local)
.flat_map(|FileReference { name, range, .. }| match name {
.filter_map(|FileReference { name, range, .. }| match name {
ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)),
_ => None,
})
@ -680,6 +680,42 @@ impl Foo {
}
}
fn main() {
let x = {
let ref this = Foo(3);
Foo(this.0 + 2)
};
}
"#,
);
}
#[test]
fn generic_method_by_ref() {
check_assist(
inline_call,
r#"
struct Foo(u32);
impl Foo {
fn add<T>(&self, a: u32) -> Self {
Foo(self.0 + a)
}
}
fn main() {
let x = Foo(3).add$0::<usize>(2);
}
"#,
r#"
struct Foo(u32);
impl Foo {
fn add<T>(&self, a: u32) -> Self {
Foo(self.0 + a)
}
}
fn main() {
let x = {
let ref this = Foo(3);

View file

@ -46,7 +46,7 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
acc.add(
AssistId("remove_dbg", AssistKind::Refactor),
"Remove dbg!()",
ctx.selection_trimmed(),
replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range)).unwrap(),
|builder| {
for (range, expr) in replacements {
if let Some(expr) = expr {
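
The assist's target range is now the smallest range covering every `dbg!()` to be removed, built by folding `cover` over the replacement ranges. A standalone sketch with a hypothetical `Range` type:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct Range {
    start: u32,
    end: u32,
}

impl Range {
    // Smallest range containing both inputs, like TextRange::cover.
    fn cover(self, other: Range) -> Range {
        Range { start: self.start.min(other.start), end: self.end.max(other.end) }
    }
}

fn main() {
    let replacements = [Range { start: 4, end: 10 }, Range { start: 20, end: 26 }];
    let target = replacements.into_iter().reduce(Range::cover).unwrap();
    assert_eq!(target, Range { start: 4, end: 26 });
}
```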

View file

@ -265,7 +265,6 @@ mod handlers {
inline_local_variable::inline_local_variable,
inline_type_alias::inline_type_alias,
inline_type_alias::inline_type_alias_uses,
inline_macro::inline_macro,
introduce_named_generic::introduce_named_generic,
introduce_named_lifetime::introduce_named_lifetime,
invert_if::invert_if,
@ -286,7 +285,6 @@ mod handlers {
raw_string::add_hash,
raw_string::make_usual_string,
raw_string::remove_hash,
remove_dbg::remove_dbg,
remove_mut::remove_mut,
remove_unused_param::remove_unused_param,
remove_parentheses::remove_parentheses,
@ -335,6 +333,9 @@ mod handlers {
generate_setter::generate_setter,
generate_delegate_methods::generate_delegate_methods,
generate_deref::generate_deref,
//
remove_dbg::remove_dbg,
inline_macro::inline_macro,
// Are you sure you want to add new assist here, and not to the
// sorted list above?
]

View file

@ -23,7 +23,7 @@ pub(crate) mod env_vars;
use std::iter;
use hir::{known, ScopeDef};
use hir::{known, ScopeDef, Variant};
use ide_db::{imports::import_assets::LocatedImport, SymbolKind};
use syntax::ast;
@ -537,17 +537,20 @@ fn enum_variants_with_paths(
impl_: &Option<ast::Impl>,
cb: impl Fn(&mut Completions, &CompletionContext<'_>, hir::Variant, hir::ModPath),
) {
let mut process_variant = |variant: Variant| {
let self_path = hir::ModPath::from_segments(
hir::PathKind::Plain,
iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))),
);
cb(acc, ctx, variant, self_path);
};
let variants = enum_.variants(ctx.db);
if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) {
if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) {
for &variant in &variants {
let self_path = hir::ModPath::from_segments(
hir::PathKind::Plain,
iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))),
);
cb(acc, ctx, variant, self_path);
}
variants.iter().for_each(|variant| process_variant(*variant));
}
}

View file

@ -415,7 +415,6 @@ fn foo(a: lib::A) { a.$0 }
fn test_local_impls() {
check(
r#"
//- /lib.rs crate:lib
pub struct A {}
mod m {
impl super::A {
@ -427,9 +426,8 @@ mod m {
}
}
}
//- /main.rs crate:main deps:lib
fn foo(a: lib::A) {
impl lib::A {
fn foo(a: A) {
impl A {
fn local_method(&self) {}
}
a.$0

View file

@ -220,6 +220,8 @@ pub(super) struct PatternContext {
/// The record pattern this name or ref is a field of
pub(super) record_pat: Option<ast::RecordPat>,
pub(super) impl_: Option<ast::Impl>,
/// List of missing variants in a match expr
pub(super) missing_variants: Vec<hir::Variant>,
}
#[derive(Debug, Clone, PartialEq, Eq)]

View file

@ -1,7 +1,7 @@
//! Module responsible for analyzing the code surrounding the cursor for completion.
use std::iter;
use hir::{Semantics, Type, TypeInfo};
use hir::{Semantics, Type, TypeInfo, Variant};
use ide_db::{active_parameter::ActiveParameter, RootDatabase};
use syntax::{
algo::{find_node_at_offset, non_trivia_sibling},
@ -353,7 +353,7 @@ fn expected_type_and_name(
_ => ty,
};
loop {
let (ty, name) = loop {
break match_ast! {
match node {
ast::LetStmt(it) => {
@ -385,9 +385,7 @@ fn expected_type_and_name(
token.clone(),
).map(|ap| {
let name = ap.ident().map(NameOrNameRef::Name);
let ty = strip_refs(ap.ty);
(Some(ty), name)
(Some(ap.ty), name)
})
.unwrap_or((None, None))
},
@ -489,7 +487,8 @@ fn expected_type_and_name(
},
}
};
}
};
(ty.map(strip_refs), name)
}
fn classify_lifetime(
@ -1133,6 +1132,9 @@ fn pattern_context_for(
pat: ast::Pat,
) -> PatternContext {
let mut param_ctx = None;
let mut missing_variants = vec![];
let (refutability, has_type_ascription) =
pat
.syntax()
@ -1162,7 +1164,52 @@ fn pattern_context_for(
})();
return (PatternRefutability::Irrefutable, has_type_ascription)
},
ast::MatchArm(_) => PatternRefutability::Refutable,
ast::MatchArm(match_arm) => {
let missing_variants_opt = match_arm
.syntax()
.parent()
.and_then(ast::MatchArmList::cast)
.and_then(|match_arm_list| {
match_arm_list
.syntax()
.parent()
.and_then(ast::MatchExpr::cast)
.and_then(|match_expr| {
let expr_opt = find_opt_node_in_file(&original_file, match_expr.expr());
expr_opt.and_then(|expr| {
sema.type_of_expr(&expr)?
.adjusted()
.autoderef(sema.db)
.find_map(|ty| match ty.as_adt() {
Some(hir::Adt::Enum(e)) => Some(e),
_ => None,
}).and_then(|enum_| {
Some(enum_.variants(sema.db))
})
})
}).and_then(|variants| {
Some(variants.iter().filter_map(|variant| {
let variant_name = variant.name(sema.db).to_string();
let variant_already_present = match_arm_list.arms().any(|arm| {
arm.pat().and_then(|pat| {
let pat_already_present = pat.syntax().to_string().contains(&variant_name);
pat_already_present.then(|| pat_already_present)
}).is_some()
});
(!variant_already_present).then_some(variant.clone())
}).collect::<Vec<Variant>>())
})
});
if let Some(missing_variants_) = missing_variants_opt {
missing_variants = missing_variants_;
};
PatternRefutability::Refutable
},
ast::LetExpr(_) => PatternRefutability::Refutable,
ast::ForExpr(_) => PatternRefutability::Irrefutable,
_ => PatternRefutability::Irrefutable,
@ -1184,6 +1231,7 @@ fn pattern_context_for(
ref_token,
record_pat: None,
impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
missing_variants,
}
}
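
The `missing_variants` computation above is a textual approximation: a variant counts as covered if its name occurs in any existing arm's pattern text. Reduced to plain strings (a sketch, not the real API), the idea is:

```rust
fn missing_variants<'a>(all_variants: &[&'a str], arm_pats: &[&str]) -> Vec<&'a str> {
    all_variants
        .iter()
        .copied()
        // Mirrors `pat.syntax().to_string().contains(&variant_name)` above.
        .filter(|variant| !arm_pats.iter().any(|pat| pat.contains(*variant)))
        .collect()
}

fn main() {
    // match baz { Baz::Foo => (), $0 } for enum Baz { Foo, Bar }:
    assert_eq!(missing_variants(&["Foo", "Bar"], &["Baz::Foo"]), vec!["Bar"]);
}
```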

View file

@ -411,3 +411,15 @@ fn main() {
expect!["ty: i32, name: ?"],
);
}
#[test]
fn expected_type_ref_return_pos() {
check_expected_type_and_name(
r#"
fn f(thing: u32) -> &u32 {
&thin$0
}
"#,
expect!["ty: u32, name: ?"],
);
}

View file

@ -37,7 +37,9 @@ pub(crate) fn render_struct_pat(
let lookup = format_literal_lookup(name.as_str(), kind);
let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?;
Some(build_completion(ctx, label, lookup, pat, strukt))
let db = ctx.db();
Some(build_completion(ctx, label, lookup, pat, strukt, strukt.ty(db), false))
}
pub(crate) fn render_variant_pat(
@ -52,6 +54,7 @@ pub(crate) fn render_variant_pat(
let fields = variant.fields(ctx.db());
let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, variant)?;
let enum_ty = variant.parent_enum(ctx.db()).ty(ctx.db());
let (name, escaped_name) = match path {
Some(path) => (path.unescaped().to_string().into(), path.to_string().into()),
@ -81,7 +84,15 @@ pub(crate) fn render_variant_pat(
}
};
Some(build_completion(ctx, label, lookup, pat, variant))
Some(build_completion(
ctx,
label,
lookup,
pat,
variant,
enum_ty,
pattern_ctx.missing_variants.contains(&variant),
))
}
fn build_completion(
@ -90,13 +101,22 @@ fn build_completion(
lookup: SmolStr,
pat: String,
def: impl HasAttrs + Copy,
adt_ty: hir::Type,
// Whether the variant is missing in the context of match statement completions
is_variant_missing: bool,
) -> CompletionItem {
let mut relevance = ctx.completion_relevance();
if is_variant_missing {
relevance.type_match = super::compute_type_match(ctx.completion, &adt_ty);
}
let mut item = CompletionItem::new(CompletionItemKind::Binding, ctx.source_range(), label);
item.set_documentation(ctx.docs(def))
.set_deprecated(ctx.is_deprecated(def))
.detail(&pat)
.lookup_by(lookup)
.set_relevance(ctx.completion_relevance());
.set_relevance(relevance);
match ctx.snippet_cap() {
Some(snippet_cap) => item.insert_snippet(snippet_cap, pat),
None => item.insert_text(pat),

View file

@ -614,6 +614,7 @@ fn f(u: U) {
check_empty(
r#"
#![rustc_coherence_is_core]
#[lang = "u32"]
impl u32 {
pub const MIN: Self = 0;

View file

@ -46,6 +46,66 @@ fn foo(s: Struct) {
);
}
#[test]
fn record_pattern_field_enum() {
check(
r#"
//- minicore:result
enum Baz { Foo, Bar }
fn foo(baz: Baz) {
match baz {
Baz::Foo => (),
$0
}
}
"#,
expect![[r#"
en Baz
en Result
md core
ev Err
ev Ok
bn Baz::Bar Baz::Bar$0
bn Baz::Foo Baz::Foo$0
bn Err() Err($1)$0
bn Ok() Ok($1)$0
kw mut
kw ref
"#]],
);
check(
r#"
//- minicore:result
enum Baz { Foo, Bar }
fn foo(baz: Baz) {
use Baz::*;
match baz {
Foo => (),
$0
}
}
"#,
expect![[r#"
en Baz
en Result
md core
ev Bar
ev Err
ev Foo
ev Ok
bn Bar Bar$0
bn Err() Err($1)$0
bn Foo Foo$0
bn Ok() Ok($1)$0
kw mut
kw ref
"#]],
);
}
#[test]
fn pattern_enum_variant() {
check(

View file

@ -608,6 +608,7 @@ fn f() {
}
//- /core.rs crate:core
#![rustc_coherence_is_core]
#[lang = "u8"]
impl u8 {
pub const MAX: Self = 255;

View file

@ -71,7 +71,7 @@ impl RootDatabase {
base_db::SourceRootQuery
base_db::SourceRootCratesQuery
// AstDatabase
// ExpandDatabase
hir::db::AstIdMapQuery
hir::db::ParseMacroExpansionQuery
hir::db::InternMacroCallQuery

View file

@ -327,7 +327,7 @@ impl NameClass {
let pat_parent = ident_pat.syntax().parent();
if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) {
if record_pat_field.name_ref().is_none() {
if let Some(field) = sema.resolve_record_pat_field(&record_pat_field) {
if let Some((field, _)) = sema.resolve_record_pat_field(&record_pat_field) {
return Some(NameClass::PatFieldShorthand {
local_def: local,
field_ref: field,
@ -483,6 +483,13 @@ impl NameRefClass {
},
ast::RecordPatField(record_pat_field) => {
sema.resolve_record_pat_field(&record_pat_field)
.map(|(field, ..)|field)
.map(Definition::Field)
.map(NameRefClass::Definition)
},
ast::RecordExprField(record_expr_field) => {
sema.resolve_record_field(&record_expr_field)
.map(|(field, ..)|field)
.map(Definition::Field)
.map(NameRefClass::Definition)
},

View file

@ -50,7 +50,7 @@ use base_db::{
AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
};
use hir::{
db::{AstDatabase, DefDatabase, HirDatabase},
db::{DefDatabase, ExpandDatabase, HirDatabase},
symbols::FileSymbolKind,
};
use stdx::hash::NoHashHashSet;
@ -68,7 +68,7 @@ pub type FxIndexMap<K, V> =
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceDatabaseStorage,
hir::db::AstDatabaseStorage,
hir::db::ExpandDatabaseStorage,
hir::db::DefDatabaseStorage,
hir::db::HirDatabaseStorage,
hir::db::InternDatabaseStorage,
@ -95,8 +95,8 @@ impl fmt::Debug for RootDatabase {
}
}
impl Upcast<dyn AstDatabase> for RootDatabase {
fn upcast(&self) -> &(dyn AstDatabase + 'static) {
impl Upcast<dyn ExpandDatabase> for RootDatabase {
fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
&*self
}
}

View file

@ -0,0 +1,77 @@
use hir::InFile;
use crate::{Diagnostic, DiagnosticsContext, Severity};
// Diagnostic: incoherent-impl
//
// This diagnostic is triggered if the target type of an impl is from a foreign crate.
pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentImpl) -> Diagnostic {
Diagnostic::new(
"incoherent-impl",
format!("cannot define inherent `impl` for foreign type"),
ctx.sema.diagnostics_display_range(InFile::new(d.file_id, d.impl_.clone().into())).range,
)
.severity(Severity::Error)
}
#[cfg(test)]
mod change_case {
use crate::tests::check_diagnostics;
#[test]
fn primitive() {
check_diagnostics(
r#"
impl bool {}
//^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type
"#,
);
}
#[test]
fn primitive_rustc_allow_incoherent_impl() {
check_diagnostics(
r#"
impl bool {
#[rustc_allow_incoherent_impl]
fn falsch(self) -> Self { false }
}
"#,
);
}
#[test]
fn rustc_allow_incoherent_impl() {
check_diagnostics(
r#"
//- /lib.rs crate:foo
#[rustc_has_incoherent_inherent_impls]
pub struct S;
//- /main.rs crate:main deps:foo
impl foo::S {
#[rustc_allow_incoherent_impl]
fn func(self) {}
}
"#,
);
check_diagnostics(
r#"
//- /lib.rs crate:foo
pub struct S;
//- /main.rs crate:main deps:foo
impl foo::S { #[rustc_allow_incoherent_impl] fn func(self) {} }
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type
"#,
);
check_diagnostics(
r#"
//- /lib.rs crate:foo
#[rustc_has_incoherent_inherent_impls]
pub struct S;
//- /main.rs crate:main deps:foo
impl foo::S { fn func(self) {} }
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type
"#,
);
}
}

View file

@ -1,4 +1,4 @@
use hir::{db::AstDatabase, InFile};
use hir::{db::ExpandDatabase, InFile};
use ide_db::{assists::Assist, defs::NameClass};
use syntax::AstNode;

View file

@ -1,6 +1,6 @@
use either::Either;
use hir::{
db::{AstDatabase, HirDatabase},
db::{ExpandDatabase, HirDatabase},
known, AssocItem, HirDisplay, InFile, Type,
};
use ide_db::{

View file

@ -1,4 +1,10 @@
use crate::{Diagnostic, DiagnosticsContext};
use hir::db::ExpandDatabase;
use ide_db::{assists::Assist, source_change::SourceChange};
use syntax::{ast, SyntaxNode};
use syntax::{match_ast, AstNode};
use text_edit::TextEdit;
use crate::{fix, Diagnostic, DiagnosticsContext};
// Diagnostic: missing-unsafe
//
@ -9,11 +15,83 @@ pub(crate) fn missing_unsafe(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsaf
"this operation is unsafe and requires an unsafe function or block",
ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
)
.with_fixes(fixes(ctx, d))
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Option<Vec<Assist>> {
// The fixit will not work correctly for macro expansions, so we don't offer it in that case.
if d.expr.file_id.is_macro() {
return None;
}
let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?;
let expr = d.expr.value.to_node(&root);
let node_to_add_unsafe_block = pick_best_node_to_add_unsafe_block(&expr)?;
let replacement = format!("unsafe {{ {} }}", node_to_add_unsafe_block.text());
let edit = TextEdit::replace(node_to_add_unsafe_block.text_range(), replacement);
let source_change =
SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit);
Some(vec![fix("add_unsafe", "Add unsafe block", source_change, expr.syntax().text_range())])
}
// Pick the first ancestor expression of the unsafe `expr` that is not a
// receiver of a method call, a field access, the left-hand side of an
// assignment, or a reference. As all of those cases would incur a forced move
// if wrapped which might not be wanted. That is:
// - `unsafe_expr.foo` -> `unsafe { unsafe_expr.foo }`
// - `unsafe_expr.foo.bar` -> `unsafe { unsafe_expr.foo.bar }`
// - `unsafe_expr.foo()` -> `unsafe { unsafe_expr.foo() }`
// - `unsafe_expr.foo.bar()` -> `unsafe { unsafe_expr.foo.bar() }`
// - `unsafe_expr += 1` -> `unsafe { unsafe_expr += 1 }`
// - `&unsafe_expr` -> `unsafe { &unsafe_expr }`
// - `&&unsafe_expr` -> `unsafe { &&unsafe_expr }`
fn pick_best_node_to_add_unsafe_block(unsafe_expr: &ast::Expr) -> Option<SyntaxNode> {
// The `unsafe_expr` might be:
// - `ast::CallExpr`: call an unsafe function
// - `ast::MethodCallExpr`: call an unsafe method
// - `ast::PrefixExpr`: dereference a raw pointer
// - `ast::PathExpr`: access a static mut variable
for (node, parent) in
unsafe_expr.syntax().ancestors().zip(unsafe_expr.syntax().ancestors().skip(1))
{
match_ast! {
match parent {
// If the `parent` is a `MethodCallExpr`, that means the `node`
// is the receiver of the method call, because only the receiver
// can be a direct child of a method call. The method name
// itself is not an expression but a `NameRef`, and an argument
// is a direct child of an `ArgList`.
ast::MethodCallExpr(_) => continue,
ast::FieldExpr(_) => continue,
ast::RefExpr(_) => continue,
ast::BinExpr(it) => {
// Check if the `node` is the left-hand side of an
// assignment, if so, we don't want to wrap it in an unsafe
// block, e.g. `unsafe_expr += 1`
let is_left_hand_side_of_assignment = {
if let Some(ast::BinaryOp::Assignment { .. }) = it.op_kind() {
it.lhs().map(|lhs| lhs.syntax().text_range().contains_range(node.text_range())).unwrap_or(false)
} else {
false
}
};
if !is_left_hand_side_of_assignment {
return Some(node);
}
},
_ => { return Some(node); }
}
}
}
None
}
#[cfg(test)]
mod tests {
use crate::tests::check_diagnostics;
use crate::tests::{check_diagnostics, check_fix, check_no_fix};
#[test]
fn missing_unsafe_diagnostic_with_raw_ptr() {
@ -23,7 +101,7 @@ fn main() {
let x = &5 as *const usize;
unsafe { let y = *x; }
let z = *x;
} //^^ error: this operation is unsafe and requires an unsafe function or block
} //^^💡 error: this operation is unsafe and requires an unsafe function or block
"#,
)
}
@ -48,9 +126,9 @@ unsafe fn unsafe_fn() {
fn main() {
unsafe_fn();
//^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
//^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
HasUnsafe.unsafe_fn();
//^^^^^^^^^^^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
//^^^^^^^^^^^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
unsafe {
unsafe_fn();
HasUnsafe.unsafe_fn();
@ -72,7 +150,7 @@ static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
let x = STATIC_MUT.a;
//^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
//^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
unsafe {
let x = STATIC_MUT.a;
}
@ -94,9 +172,298 @@ extern "rust-intrinsic" {
fn main() {
let _ = bitreverse(12);
let _ = floorf32(12.0);
//^^^^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block
//^^^^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
}
"#,
);
}
#[test]
fn add_unsafe_block_when_dereferencing_a_raw_pointer() {
check_fix(
r#"
fn main() {
let x = &5 as *const usize;
let z = *x$0;
}
"#,
r#"
fn main() {
let x = &5 as *const usize;
let z = unsafe { *x };
}
"#,
);
}
#[test]
fn add_unsafe_block_when_calling_unsafe_function() {
check_fix(
r#"
unsafe fn func() {
let x = &5 as *const usize;
let z = *x;
}
fn main() {
func$0();
}
"#,
r#"
unsafe fn func() {
let x = &5 as *const usize;
let z = *x;
}
fn main() {
unsafe { func() };
}
"#,
)
}
#[test]
fn add_unsafe_block_when_calling_unsafe_method() {
check_fix(
r#"
struct S(usize);
impl S {
unsafe fn func(&self) {
let x = &self.0 as *const usize;
let z = *x;
}
}
fn main() {
let s = S(5);
s.func$0();
}
"#,
r#"
struct S(usize);
impl S {
unsafe fn func(&self) {
let x = &self.0 as *const usize;
let z = *x;
}
}
fn main() {
let s = S(5);
unsafe { s.func() };
}
"#,
)
}
#[test]
fn add_unsafe_block_when_accessing_mutable_static() {
check_fix(
r#"
struct Ty {
a: u8,
}
static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
let x = STATIC_MUT$0.a;
}
"#,
r#"
struct Ty {
a: u8,
}
static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
let x = unsafe { STATIC_MUT.a };
}
"#,
)
}
#[test]
fn add_unsafe_block_when_calling_unsafe_intrinsic() {
check_fix(
r#"
extern "rust-intrinsic" {
pub fn floorf32(x: f32) -> f32;
}
fn main() {
let _ = floorf32$0(12.0);
}
"#,
r#"
extern "rust-intrinsic" {
pub fn floorf32(x: f32) -> f32;
}
fn main() {
let _ = unsafe { floorf32(12.0) };
}
"#,
)
}
#[test]
fn unsafe_expr_as_a_receiver_of_a_method_call() {
check_fix(
r#"
unsafe fn foo() -> String {
"string".to_string()
}
fn main() {
foo$0().len();
}
"#,
r#"
unsafe fn foo() -> String {
"string".to_string()
}
fn main() {
unsafe { foo().len() };
}
"#,
)
}
#[test]
fn unsafe_expr_as_an_argument_of_a_method_call() {
check_fix(
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let mut v = vec![];
v.push(STATIC_MUT$0);
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let mut v = vec![];
v.push(unsafe { STATIC_MUT });
}
"#,
)
}
#[test]
fn unsafe_expr_as_left_hand_side_of_assignment() {
check_fix(
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
STATIC_MUT$0 = 1;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
unsafe { STATIC_MUT = 1 };
}
"#,
)
}
#[test]
fn unsafe_expr_as_right_hand_side_of_assignment() {
check_fix(
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x;
x = STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x;
x = unsafe { STATIC_MUT };
}
"#,
)
}
#[test]
fn unsafe_expr_in_binary_plus() {
check_fix(
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = STATIC_MUT$0 + 1;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = unsafe { STATIC_MUT } + 1;
}
"#,
)
}
#[test]
fn ref_to_unsafe_expr() {
check_fix(
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = &STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = unsafe { &STATIC_MUT };
}
"#,
)
}
#[test]
fn ref_ref_to_unsafe_expr() {
check_fix(
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = &&STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = unsafe { &&STATIC_MUT };
}
"#,
)
}
#[test]
fn unsafe_expr_in_macro_call() {
check_no_fix(
r#"
unsafe fn foo() -> u8 {
0
}
fn main() {
let x = format!("foo: {}", foo$0());
}
"#,
)
}
}
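
The ancestor-picking heuristic documented in `pick_best_node_to_add_unsafe_block` above, reduced to a list of parent node kinds (a hypothetical simplification over real syntax trees; the assignment-LHS case from the real code is omitted): keep climbing while the parent is a method-call receiver, field access, or reference, and wrap the first node whose parent is anything else.

```rust
#[derive(Clone, Copy)]
enum ParentKind {
    MethodCall,
    Field,
    Ref,
    Other,
}

// parents[i] is the kind of the i-th ancestor's parent; returns the index of
// the ancestor to wrap in `unsafe { ... }`.
fn pick_wrap_index(parents: &[ParentKind]) -> usize {
    parents
        .iter()
        .position(|k| !matches!(k, ParentKind::MethodCall | ParentKind::Field | ParentKind::Ref))
        .unwrap_or(parents.len())
}

fn main() {
    // `unsafe_expr.foo.bar()`: parents are a field access, then a method
    // call, then the enclosing statement, so the whole call gets wrapped.
    let parents = [ParentKind::Field, ParentKind::MethodCall, ParentKind::Other];
    assert_eq!(pick_wrap_index(&parents), 2);
}
```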

View file

@ -505,6 +505,30 @@ fn main() {
);
}
#[test]
fn initialization_is_not_mutation_in_loop() {
check_diagnostics(
r#"
fn main() {
let a;
loop {
let c @ (
mut b,
//^^^^^ 💡 weak: variable does not need to be mutable
mut d
//^^^^^ 💡 weak: variable does not need to be mutable
);
a = 1;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
b = 1;
c = (2, 3);
d = 3;
}
}
"#,
);
}
#[test]
fn function_arguments_are_initialized() {
check_diagnostics(

View file

@ -1,4 +1,4 @@
use hir::{db::AstDatabase, HasSource, HirDisplay, Semantics};
use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics};
use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
use syntax::{
ast::{self, edit::IndentLevel, make},

View file

@ -62,6 +62,26 @@ mod module {
fn main(s: module::Struct) {
s.field;
}
"#,
);
}
#[test]
fn block_module_madness() {
check_diagnostics(
r#"
fn main() {
let strukt = {
use crate as ForceParentBlockDefMap;
{
pub struct Struct {
field: (),
}
Struct { field: () }
}
};
strukt.field;
}
"#,
);
}

View file

@ -1,4 +1,4 @@
use hir::{db::AstDatabase, InFile};
use hir::{db::ExpandDatabase, InFile};
use ide_db::source_change::SourceChange;
use syntax::{
ast::{self, HasArgList},

View file

@ -1,5 +1,5 @@
use either::Either;
use hir::{db::AstDatabase, HirDisplay, InFile, Type};
use hir::{db::ExpandDatabase, HirDisplay, InFile, Type};
use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
use syntax::{
ast::{self, BlockExpr, ExprStmt},

View file

@ -1,4 +1,4 @@
use hir::{db::AstDatabase, HirDisplay, InFile};
use hir::{db::ExpandDatabase, HirDisplay, InFile};
use ide_db::{
assists::{Assist, AssistId, AssistKind},
base_db::FileRange,

View file

@ -1,4 +1,4 @@
use hir::{db::AstDatabase, HirDisplay};
use hir::{db::ExpandDatabase, HirDisplay};
use ide_db::{
assists::{Assist, AssistId, AssistKind},
base_db::FileRange,

View file

@ -1,4 +1,4 @@
use hir::db::AstDatabase;
use hir::db::ExpandDatabase;
use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit};
use itertools::Itertools;
use syntax::AstNode;

View file

@ -29,6 +29,7 @@ mod handlers {
pub(crate) mod break_outside_of_loop;
pub(crate) mod expected_function;
pub(crate) mod inactive_code;
pub(crate) mod incoherent_impl;
pub(crate) mod incorrect_case;
pub(crate) mod invalid_derive_target;
pub(crate) mod macro_error;
@ -254,6 +255,7 @@ pub fn diagnostics(
AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d),
AnyDiagnostic::ExpectedFunction(d) => handlers::expected_function::expected_function(&ctx, &d),
AnyDiagnostic::IncorrectCase(d) => handlers::incorrect_case::incorrect_case(&ctx, &d),
AnyDiagnostic::IncoherentImpl(d) => handlers::incoherent_impl::incoherent_impl(&ctx, &d),
AnyDiagnostic::MacroError(d) => handlers::macro_error::macro_error(&ctx, &d),
AnyDiagnostic::MalformedDerive(d) => handlers::malformed_derive::malformed_derive(&ctx, &d),
AnyDiagnostic::MismatchedArgCount(d) => handlers::mismatched_arg_count::mismatched_arg_count(&ctx, &d),

View file

@ -297,6 +297,7 @@ impl Foo<str> {}
//- /lib.rs crate:main deps:core
fn foo(_: bool$0) {{}}
//- /libcore.rs crate:core
#![rustc_coherence_is_core]
#[lang = "bool"]
impl bool {}
//^^^^

View file

@ -55,7 +55,7 @@ pub(crate) fn goto_type_definition(
ty
} else {
let record_field = ast::RecordPatField::for_field_name_ref(&it)?;
sema.resolve_record_pat_field(&record_field)?.ty(db)
sema.resolve_record_pat_field(&record_field)?.1
}
},
_ => return None,

View file

@ -31,19 +31,31 @@ pub(super) fn hints(
return None;
}
// These inherit from the inner expression which would result in duplicate hints
if let ast::Expr::ParenExpr(_)
| ast::Expr::IfExpr(_)
| ast::Expr::BlockExpr(_)
| ast::Expr::MatchExpr(_) = expr
{
// ParenExpr resolve to their contained expressions HIR so they will dupe these hints
if let ast::Expr::ParenExpr(_) = expr {
return None;
}
if let ast::Expr::BlockExpr(b) = expr {
if !b.is_standalone() {
return None;
}
}
let descended = sema.descend_node_into_attributes(expr.clone()).pop();
let desc_expr = descended.as_ref().unwrap_or(expr);
let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?;
if let ast::Expr::BlockExpr(_) | ast::Expr::IfExpr(_) | ast::Expr::MatchExpr(_) = desc_expr {
if let [Adjustment { kind: Adjust::Deref(_), source, .. }, Adjustment { kind: Adjust::Borrow(_), source: _, target }] =
&*adjustments
{
// Don't show unnecessary reborrows for these, they will just repeat the inner ones again
if source == target {
return None;
}
}
}
let (postfix, needs_outer_parens, needs_inner_parens) =
mode_and_needs_parens_for_adjustment_hints(expr, config.adjustment_hints_mode);
@ -67,6 +79,7 @@ pub(super) fn hints(
for Adjustment { source, target, kind } in iter {
if source == target {
cov_mark::hit!(same_type_adjustment);
continue;
}
@ -251,7 +264,7 @@ mod tests {
check_with_config(
InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG },
r#"
//- minicore: coerce_unsized, fn
//- minicore: coerce_unsized, fn, eq
fn main() {
let _: u32 = loop {};
//^^^^^^^<never-to-any>
@ -332,7 +345,7 @@ fn main() {
loop {}
//^^^^^^^<never-to-any>
};
let _: &mut [u32] = match () { () => &mut [] }
let _: &mut [u32] = match () { () => &mut [] };
//^^^^^^^<unsize>
//^^^^^^^&mut $
//^^^^^^^*
@ -341,6 +354,12 @@ fn main() {
//^^^^^^^^^^<unsize>
//^^^^^^^^^^&mut $
//^^^^^^^^^^*
() == ();
// ^^&
// ^^&
(()) == {()};
// ^^&
// ^^^^&
}
#[derive(Copy, Clone)]
@ -363,7 +382,7 @@ impl Struct {
..DISABLED_CONFIG
},
r#"
//- minicore: coerce_unsized, fn
//- minicore: coerce_unsized, fn, eq
fn main() {
Struct.consume();
@ -419,7 +438,7 @@ fn main() {
loop {}
//^^^^^^^.<never-to-any>
};
let _: &mut [u32] = match () { () => &mut [] }
let _: &mut [u32] = match () { () => &mut [] };
//^^^^^^^(
//^^^^^^^)
//^^^^^^^.*
@ -432,6 +451,12 @@ fn main() {
//^^^^^^^^^^.*
//^^^^^^^^^^.&mut
//^^^^^^^^^^.<unsize>
() == ();
// ^^.&
// ^^.&
(()) == {()};
// ^^.&
// ^^^^.&
}
#[derive(Copy, Clone)]
@ -499,6 +524,7 @@ fn main() {
#[test]
fn never_to_never_is_never_shown() {
cov_mark::check!(same_type_adjustment);
check_with_config(
InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG },
r#"

View file

@ -435,7 +435,7 @@ fn main() {
file_id: FileId(
1,
),
range: 3386..3394,
range: 3415..3423,
},
),
tooltip: "",
@ -448,7 +448,7 @@ fn main() {
file_id: FileId(
1,
),
range: 3418..3422,
range: 3447..3451,
},
),
tooltip: "",
@ -468,7 +468,7 @@ fn main() {
file_id: FileId(
1,
),
range: 3386..3394,
range: 3415..3423,
},
),
tooltip: "",
@ -481,7 +481,7 @@ fn main() {
file_id: FileId(
1,
),
range: 3418..3422,
range: 3447..3451,
},
),
tooltip: "",
@ -501,7 +501,7 @@ fn main() {
file_id: FileId(
1,
),
range: 3386..3394,
range: 3415..3423,
},
),
tooltip: "",
@ -514,7 +514,7 @@ fn main() {
file_id: FileId(
1,
),
range: 3418..3422,
range: 3447..3451,
},
),
tooltip: "",

View file

@ -16,7 +16,7 @@ use stdx::format_to;
use syntax::{
algo,
ast::{self, HasArgList},
match_ast, AstNode, Direction, SyntaxToken, TextRange, TextSize,
match_ast, AstNode, Direction, SyntaxElementChildren, SyntaxToken, TextRange, TextSize,
};
use crate::RootDatabase;
@ -102,6 +102,20 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio
}
return signature_help_for_record_lit(&sema, record, token);
},
ast::RecordPat(record) => {
let cursor_outside = record.record_pat_field_list().and_then(|list| list.r_curly_token()).as_ref() == Some(&token);
if cursor_outside {
continue;
}
return signature_help_for_record_pat(&sema, record, token);
},
ast::TupleStructPat(tuple_pat) => {
let cursor_outside = tuple_pat.r_paren_token().as_ref() == Some(&token);
if cursor_outside {
continue;
}
return signature_help_for_tuple_struct_pat(&sema, tuple_pat, token);
},
_ => (),
}
}
@ -346,10 +360,111 @@ fn signature_help_for_record_lit(
record: ast::RecordExpr,
token: SyntaxToken,
) -> Option<SignatureHelp> {
let active_parameter = record
.record_expr_field_list()?
signature_help_for_record_(
sema,
record.record_expr_field_list()?.syntax().children_with_tokens(),
&record.path()?,
record
.record_expr_field_list()?
.fields()
.filter_map(|field| sema.resolve_record_field(&field))
.map(|(field, _, ty)| (field, ty)),
token,
)
}
fn signature_help_for_record_pat(
sema: &Semantics<'_, RootDatabase>,
record: ast::RecordPat,
token: SyntaxToken,
) -> Option<SignatureHelp> {
signature_help_for_record_(
sema,
record.record_pat_field_list()?.syntax().children_with_tokens(),
&record.path()?,
record
.record_pat_field_list()?
.fields()
.filter_map(|field| sema.resolve_record_pat_field(&field)),
token,
)
}
fn signature_help_for_tuple_struct_pat(
sema: &Semantics<'_, RootDatabase>,
pat: ast::TupleStructPat,
token: SyntaxToken,
) -> Option<SignatureHelp> {
let rest_pat = pat.fields().find(|it| matches!(it, ast::Pat::RestPat(_)));
let is_left_of_rest_pat =
rest_pat.map_or(true, |it| token.text_range().start() < it.syntax().text_range().end());
let mut res = SignatureHelp {
doc: None,
signature: String::new(),
parameters: vec![],
active_parameter: None,
};
let db = sema.db;
let path_res = sema.resolve_path(&pat.path()?)?;
let fields: Vec<_> = if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res {
let en = variant.parent_enum(db);
res.doc = en.docs(db).map(|it| it.into());
format_to!(res.signature, "enum {}::{} (", en.name(db), variant.name(db));
variant.fields(db)
} else {
let adt = match path_res {
PathResolution::SelfType(imp) => imp.self_ty(db).as_adt()?,
PathResolution::Def(ModuleDef::Adt(adt)) => adt,
_ => return None,
};
match adt {
hir::Adt::Struct(it) => {
res.doc = it.docs(db).map(|it| it.into());
format_to!(res.signature, "struct {} (", it.name(db));
it.fields(db)
}
_ => return None,
}
};
let commas = pat
.syntax()
.children_with_tokens()
.filter_map(syntax::NodeOrToken::into_token)
.filter(|t| t.kind() == syntax::T![,]);
res.active_parameter = Some(if is_left_of_rest_pat {
commas.take_while(|t| t.text_range().start() <= token.text_range().start()).count()
} else {
let n_commas = commas
.collect::<Vec<_>>()
.into_iter()
.rev()
.take_while(|t| t.text_range().start() > token.text_range().start())
.count();
fields.len().saturating_sub(1).saturating_sub(n_commas)
});
let mut buf = String::new();
for ty in fields.into_iter().map(|it| it.ty(db)) {
format_to!(buf, "{}", ty.display_truncated(db, Some(20)));
res.push_call_param(&buf);
buf.clear();
}
res.signature.push_str(")");
Some(res)
}
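A note for readers of this hunk: the active-parameter math above reduces to a simple rule. A minimal runnable sketch with illustrative names (not code from this commit): left of a `..` rest pattern, the active index is the number of commas before the cursor; right of it, commas after the cursor are counted from the end and subtracted from the field count.
fn active_param(
    n_fields: usize,
    commas_before_cursor: usize,
    commas_after_cursor: usize,
    left_of_rest: bool,
) -> usize {
    if left_of_rest {
        commas_before_cursor
    } else {
        n_fields.saturating_sub(1).saturating_sub(commas_after_cursor)
    }
}
fn main() {
    // `let S(1, .., 1, $0, 2)` on a 5-field struct: one comma follows the
    // cursor, so field index 3 is active (the `u16` in the tests below).
    assert_eq!(active_param(5, 3, 1, false), 3);
    // `let S(0, $0)`: the cursor is left of any rest pattern and one comma
    // precedes it, so field index 1 is active.
    assert_eq!(active_param(2, 1, 0, true), 1);
}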
fn signature_help_for_record_(
sema: &Semantics<'_, RootDatabase>,
field_list_children: SyntaxElementChildren,
path: &ast::Path,
fields2: impl Iterator<Item = (hir::Field, hir::Type)>,
token: SyntaxToken,
) -> Option<SignatureHelp> {
let active_parameter = field_list_children
.filter_map(syntax::NodeOrToken::into_token)
.filter(|t| t.kind() == syntax::T![,])
.take_while(|t| t.text_range().start() <= token.text_range().start())
@ -365,7 +480,7 @@ fn signature_help_for_record_lit(
let fields;
let db = sema.db;
let path_res = sema.resolve_path(&record.path()?)?;
let path_res = sema.resolve_path(path)?;
if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res {
fields = variant.fields(db);
let en = variant.parent_enum(db);
@ -397,8 +512,7 @@ fn signature_help_for_record_lit(
let mut fields =
fields.into_iter().map(|field| (field.name(db), Some(field))).collect::<FxIndexMap<_, _>>();
let mut buf = String::new();
for field in record.record_expr_field_list()?.fields() {
let Some((field, _, ty)) = sema.resolve_record_field(&field) else { continue };
for (field, ty) in fields2 {
let name = field.name(db);
format_to!(buf, "{name}: {}", ty.display_truncated(db, Some(20)));
res.push_record_field(&buf);
@ -439,6 +553,7 @@ mod tests {
(database, FilePosition { file_id, offset })
}
#[track_caller]
fn check(ra_fixture: &str, expect: Expect) {
let fixture = format!(
r#"
@ -890,6 +1005,119 @@ fn main() {
);
}
#[test]
fn tuple_struct_pat() {
check(
r#"
/// A cool tuple struct
struct S(u32, i32);
fn main() {
let S(0, $0);
}
"#,
expect![[r#"
A cool tuple struct
------
struct S (u32, i32)
--- ^^^
"#]],
);
}
#[test]
fn tuple_struct_pat_rest() {
check(
r#"
/// A cool tuple struct
struct S(u32, i32, f32, u16);
fn main() {
let S(0, .., $0);
}
"#,
expect![[r#"
A cool tuple struct
------
struct S (u32, i32, f32, u16)
--- --- --- ^^^
"#]],
);
check(
r#"
/// A cool tuple struct
struct S(u32, i32, f32, u16, u8);
fn main() {
let S(0, .., $0, 0);
}
"#,
expect![[r#"
A cool tuple struct
------
struct S (u32, i32, f32, u16, u8)
--- --- --- ^^^ --
"#]],
);
check(
r#"
/// A cool tuple struct
struct S(u32, i32, f32, u16);
fn main() {
let S($0, .., 1);
}
"#,
expect![[r#"
A cool tuple struct
------
struct S (u32, i32, f32, u16)
^^^ --- --- ---
"#]],
);
check(
r#"
/// A cool tuple struct
struct S(u32, i32, f32, u16, u8);
fn main() {
let S(1, .., 1, $0, 2);
}
"#,
expect![[r#"
A cool tuple struct
------
struct S (u32, i32, f32, u16, u8)
--- --- --- ^^^ --
"#]],
);
check(
r#"
/// A cool tuple struct
struct S(u32, i32, f32, u16);
fn main() {
let S(1, $0.., 1);
}
"#,
expect![[r#"
A cool tuple struct
------
struct S (u32, i32, f32, u16)
--- ^^^ --- ---
"#]],
);
check(
r#"
/// A cool tuple struct
struct S(u32, i32, f32, u16);
fn main() {
let S(1, ..$0, 1);
}
"#,
expect![[r#"
A cool tuple struct
------
struct S (u32, i32, f32, u16)
--- ^^^ --- ---
"#]],
);
}
#[test]
fn generic_struct() {
check(
@ -1550,6 +1778,29 @@ impl S {
);
}
#[test]
fn record_pat() {
check(
r#"
struct Strukt<T, U = ()> {
t: T,
u: U,
unit: (),
}
fn f() {
let Strukt {
u: 0,
$0
}
}
"#,
expect![[r#"
struct Strukt { u: i32, t: T, unit: () }
------ ^^^^ --------
"#]],
);
}
#[test]
fn test_enum_in_nested_method_in_lambda() {
check(

View file

@ -431,14 +431,15 @@ fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker {
fn pat_list(p: &mut Parser<'_>, ket: SyntaxKind) {
while !p.at(EOF) && !p.at(ket) {
if !p.at_ts(PAT_TOP_FIRST) {
p.error("expected a pattern");
break;
}
pattern_top(p);
if !p.at(ket) {
p.expect(T![,]);
if !p.at(T![,]) {
if p.at_ts(PAT_TOP_FIRST) {
p.error(format!("expected {:?}, got {:?}", T![,], p.current()));
} else {
break;
}
} else {
p.bump(T![,]);
}
}
}
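An illustrative, deliberately malformed input (not a test from this commit) showing the new recovery: a missing comma between patterns now yields a targeted "expected COMMA"-style error while both elements still parse, and a token that cannot start a pattern makes the loop break instead of erroring repeatedly.
fn f(x: (u8, u8)) {
    let (a b) = x; // parser reports an "expected COMMA" error between
                   // `a` and `b`, yet both still bind as pattern elements
}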

View file

@ -429,8 +429,9 @@ impl WorkspaceBuildScripts {
for p in rustc.packages() {
let package = &rustc[p];
if package.targets.iter().any(|&it| rustc[it].is_proc_macro) {
if let Some((_, path)) =
proc_macro_dylibs.iter().find(|(name, _)| *name == package.name)
if let Some((_, path)) = proc_macro_dylibs
.iter()
.find(|(name, _)| *name.trim_start_matches("lib") == package.name)
{
bs.outputs[p].proc_macro_dylib_path = Some(path.clone());
}
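The added `trim_start_matches` accounts for unix-style artifact names. A standalone sketch of the comparison (the helper name is illustrative):
// On unix-like targets the proc-macro dylib for package `foo` is named
// `libfoo.so`/`libfoo.dylib`, so an optional `lib` prefix is stripped
// before matching against the package name.
fn dylib_matches_package(dylib_stem: &str, package_name: &str) -> bool {
    dylib_stem.trim_start_matches("lib") == package_name
}
fn main() {
    assert!(dylib_matches_package("libserde_derive", "serde_derive"));
    assert!(dylib_matches_package("serde_derive", "serde_derive")); // e.g. on Windows
}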

View file

@ -50,7 +50,7 @@ impl ops::Index<Target> for CargoWorkspace {
/// Describes how to set the rustc source directory.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RustcSource {
pub enum RustLibSource {
/// Explicit path for the rustc source directory.
Path(AbsPathBuf),
/// Try to automatically detect where the rustc source directory is.
@ -95,10 +95,10 @@ pub struct CargoConfig {
/// rustc target
pub target: Option<String>,
/// Sysroot loading behavior
pub sysroot: Option<RustcSource>,
pub sysroot: Option<RustLibSource>,
pub sysroot_src: Option<AbsPathBuf>,
/// rustc private crate source
pub rustc_source: Option<RustcSource>,
pub rustc_source: Option<RustLibSource>,
/// crates to disable `#[cfg(test)]` on
pub unset_test_crates: UnsetTestCrates,
/// Invoke `cargo check` through the RUSTC_WRAPPER.

View file

@ -44,7 +44,7 @@ pub use crate::{
build_scripts::WorkspaceBuildScripts,
cargo_workspace::{
CargoConfig, CargoFeatures, CargoWorkspace, Package, PackageData, PackageDependency,
RustcSource, Target, TargetData, TargetKind, UnsetTestCrates,
RustLibSource, Target, TargetData, TargetKind, UnsetTestCrates,
},
manifest_path::ManifestPath,
project_json::{ProjectJson, ProjectJsonData},

View file

@ -24,8 +24,8 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr
let project_workspace = ProjectWorkspace::Cargo {
cargo: cargo_workspace,
build_scripts: WorkspaceBuildScripts::default(),
sysroot: None,
rustc: None,
sysroot: Err(None),
rustc: Err(None),
rustc_cfg: Vec::new(),
cfg_overrides,
toolchain: None,
@ -37,7 +37,7 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr
fn load_rust_project(file: &str) -> CrateGraph {
let data = get_test_json_file(file);
let project = rooted_project_json(data);
let sysroot = Some(get_fake_sysroot());
let sysroot = Ok(get_fake_sysroot());
let project_workspace = ProjectWorkspace::Json { project, sysroot, rustc_cfg: Vec::new() };
to_crate_graph(project_workspace)
}

View file

@ -17,7 +17,7 @@ use stdx::{always, hash::NoHashHashMap};
use crate::{
build_scripts::BuildScriptOutput,
cargo_workspace::{DepKind, PackageData, RustcSource},
cargo_workspace::{DepKind, PackageData, RustLibSource},
cfg_flag::CfgFlag,
rustc_cfg,
sysroot::SysrootCrate,
@ -69,8 +69,8 @@ pub enum ProjectWorkspace {
Cargo {
cargo: CargoWorkspace,
build_scripts: WorkspaceBuildScripts,
sysroot: Option<Sysroot>,
rustc: Option<(CargoWorkspace, WorkspaceBuildScripts)>,
sysroot: Result<Sysroot, Option<String>>,
rustc: Result<(CargoWorkspace, WorkspaceBuildScripts), Option<String>>,
/// Holds cfg flags for the current target. We get those by running
/// `rustc --print cfg`.
///
@ -82,7 +82,7 @@ pub enum ProjectWorkspace {
target_layout: Result<String, String>,
},
/// Project workspace was manually specified using a `rust-project.json` file.
Json { project: ProjectJson, sysroot: Option<Sysroot>, rustc_cfg: Vec<CfgFlag> },
Json { project: ProjectJson, sysroot: Result<Sysroot, Option<String>>, rustc_cfg: Vec<CfgFlag> },
// FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning.
// That's not the end user experience we should strive for.
// Ideally, you should be able to just open a random detached file in existing cargo projects, and get the basic features working.
@ -93,7 +93,11 @@ pub enum ProjectWorkspace {
// //
/// Project with a set of disjoint files, not belonging to any particular workspace.
/// Backed by basic sysroot crates for basic completion and highlighting.
DetachedFiles { files: Vec<AbsPathBuf>, sysroot: Option<Sysroot>, rustc_cfg: Vec<CfgFlag> },
DetachedFiles {
files: Vec<AbsPathBuf>,
sysroot: Result<Sysroot, Option<String>>,
rustc_cfg: Vec<CfgFlag>,
},
}
impl fmt::Debug for ProjectWorkspace {
@ -113,7 +117,7 @@ impl fmt::Debug for ProjectWorkspace {
.debug_struct("Cargo")
.field("root", &cargo.workspace_root().file_name())
.field("n_packages", &cargo.packages().len())
.field("sysroot", &sysroot.is_some())
.field("sysroot", &sysroot.is_ok())
.field(
"n_rustc_compiler_crates",
&rustc.as_ref().map_or(0, |(rc, _)| rc.packages().len()),
@ -126,7 +130,7 @@ impl fmt::Debug for ProjectWorkspace {
ProjectWorkspace::Json { project, sysroot, rustc_cfg } => {
let mut debug_struct = f.debug_struct("Json");
debug_struct.field("n_crates", &project.n_crates());
if let Some(sysroot) = sysroot {
if let Ok(sysroot) = sysroot {
debug_struct.field("n_sysroot_crates", &sysroot.crates().len());
}
debug_struct.field("n_rustc_cfg", &rustc_cfg.len());
@ -135,7 +139,7 @@ impl fmt::Debug for ProjectWorkspace {
ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f
.debug_struct("DetachedFiles")
.field("n_files", &files.len())
.field("sysroot", &sysroot.is_some())
.field("sysroot", &sysroot.is_ok())
.field("n_rustc_cfg", &rustc_cfg.len())
.finish(),
}
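The `Option<Sysroot>` to `Result<Sysroot, Option<String>>` change threads failure messages through to the status report (see the reload.rs hunks further down). A minimal standalone sketch of the convention, with `String` standing in for the real `Sysroot` type:
// Err(None): no sysroot was configured, stay silent.
// Err(Some(msg)): loading was attempted and failed; surface `msg` to the user.
fn sysroot_warning(sysroot: &Result<String, Option<String>>) -> Option<&str> {
    match sysroot {
        Ok(_) => None,
        Err(None) => None,
        Err(Some(msg)) => Some(msg.as_str()),
    }
}
fn main() {
    assert_eq!(sysroot_warning(&Err(None)), None);
    assert_eq!(
        sysroot_warning(&Err(Some("sysroot not found".to_owned()))),
        Some("sysroot not found")
    );
}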
@ -191,93 +195,81 @@ impl ProjectWorkspace {
let cargo = CargoWorkspace::new(meta);
let sysroot = match (&config.sysroot, &config.sysroot_src) {
(Some(RustcSource::Path(path)), None) => {
match Sysroot::with_sysroot_dir(path.clone()) {
Ok(it) => Some(it),
Err(e) => {
tracing::error!(%e, "Failed to find sysroot at {}.", path.display());
None
}
}
(Some(RustLibSource::Path(path)), None) => {
Sysroot::with_sysroot_dir(path.clone()).map_err(|e| {
Some(format!("Failed to find sysroot at {}:{e}", path.display()))
})
}
(Some(RustcSource::Discover), None) => {
match Sysroot::discover(cargo_toml.parent(), &config.extra_env) {
Ok(it) => Some(it),
Err(e) => {
tracing::error!(
%e,
"Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
cargo_toml.display()
);
None
}
}
(Some(RustLibSource::Discover), None) => {
Sysroot::discover(cargo_toml.parent(), &config.extra_env).map_err(|e| {
Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? {e}", cargo_toml.display()))
})
}
(Some(RustcSource::Path(sysroot)), Some(sysroot_src)) => {
Some(Sysroot::load(sysroot.clone(), sysroot_src.clone()))
(Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => {
Ok(Sysroot::load(sysroot.clone(), sysroot_src.clone()))
}
(Some(RustcSource::Discover), Some(sysroot_src)) => {
match Sysroot::discover_with_src_override(
(Some(RustLibSource::Discover), Some(sysroot_src)) => {
Sysroot::discover_with_src_override(
cargo_toml.parent(),
&config.extra_env,
sysroot_src.clone(),
) {
Ok(it) => Some(it),
Err(e) => {
tracing::error!(
%e,
"Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
cargo_toml.display()
);
None
}
}
).map_err(|e| {
Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? {e}", cargo_toml.display()))
})
}
(None, _) => None,
(None, _) => Err(None),
};
if let Some(sysroot) = &sysroot {
if let Ok(sysroot) = &sysroot {
tracing::info!(workspace = %cargo_toml.display(), src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
}
let rustc_dir = match &config.rustc_source {
Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(),
Some(RustcSource::Discover) => {
sysroot.as_ref().and_then(Sysroot::discover_rustc)
Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
.map_err(|p| {
Some(format!("rustc source path is not absolute: {}", p.display()))
}),
Some(RustLibSource::Discover) => {
sysroot.as_ref().ok().and_then(Sysroot::discover_rustc).ok_or_else(|| {
Some(format!("Failed to discover rustc source for sysroot."))
})
}
None => None,
None => Err(None),
};
let rustc = match rustc_dir {
Some(rustc_dir) => {
tracing::info!(workspace = %cargo_toml.display(), rustc_dir = %rustc_dir.display(), "Using rustc source");
match CargoWorkspace::fetch_metadata(
&rustc_dir,
cargo_toml.parent(),
config,
progress,
) {
Ok(meta) => {
let workspace = CargoWorkspace::new(meta);
let buildscripts = WorkspaceBuildScripts::rustc_crates(
&workspace,
cargo_toml.parent(),
&config.extra_env,
);
Some((workspace, buildscripts))
}
Err(e) => {
tracing::error!(
%e,
"Failed to read Cargo metadata from rustc source at {}",
rustc_dir.display()
);
None
}
let rustc = rustc_dir.and_then(|rustc_dir| {
tracing::info!(workspace = %cargo_toml.display(), rustc_dir = %rustc_dir.display(), "Using rustc source");
match CargoWorkspace::fetch_metadata(
&rustc_dir,
cargo_toml.parent(),
&CargoConfig {
features: crate::CargoFeatures::default(),
..config.clone()
},
progress,
) {
Ok(meta) => {
let workspace = CargoWorkspace::new(meta);
let buildscripts = WorkspaceBuildScripts::rustc_crates(
&workspace,
cargo_toml.parent(),
&config.extra_env,
);
Ok((workspace, buildscripts))
}
Err(e) => {
tracing::error!(
%e,
"Failed to read Cargo metadata from rustc source at {}",
rustc_dir.display()
);
Err(Some(format!(
"Failed to read Cargo metadata from rustc source at {}: {e}",
rustc_dir.display())
))
}
}
None => None,
};
});
let rustc_cfg =
rustc_cfg::get(Some(&cargo_toml), config.target.as_deref(), &config.extra_env);
@ -313,12 +305,12 @@ impl ProjectWorkspace {
extra_env: &FxHashMap<String, String>,
) -> ProjectWorkspace {
let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
(Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)),
(Some(sysroot), Some(sysroot_src)) => Ok(Sysroot::load(sysroot, sysroot_src)),
(Some(sysroot), None) => {
// assume sysroot is structured like rustup's and guess `sysroot_src`
let sysroot_src =
sysroot.join("lib").join("rustlib").join("src").join("rust").join("library");
Some(Sysroot::load(sysroot, sysroot_src))
Ok(Sysroot::load(sysroot, sysroot_src))
}
(None, Some(sysroot_src)) => {
// assume sysroot is structured like rustup's and guess `sysroot`
@ -326,11 +318,11 @@ impl ProjectWorkspace {
for _ in 0..5 {
sysroot.pop();
}
Some(Sysroot::load(sysroot, sysroot_src))
Ok(Sysroot::load(sysroot, sysroot_src))
}
(None, None) => None,
(None, None) => Err(None),
};
if let Some(sysroot) = &sysroot {
if let Ok(sysroot) = &sysroot {
tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
}
@ -343,33 +335,23 @@ impl ProjectWorkspace {
config: &CargoConfig,
) -> Result<ProjectWorkspace> {
let sysroot = match &config.sysroot {
Some(RustcSource::Path(path)) => match Sysroot::with_sysroot_dir(path.clone()) {
Ok(it) => Some(it),
Err(e) => {
tracing::error!(%e, "Failed to find sysroot at {}.", path.display());
None
}
},
Some(RustcSource::Discover) => {
Some(RustLibSource::Path(path)) => Sysroot::with_sysroot_dir(path.clone())
.map_err(|e| Some(format!("Failed to find sysroot at {}:{e}", path.display()))),
Some(RustLibSource::Discover) => {
let dir = &detached_files
.first()
.and_then(|it| it.parent())
.ok_or_else(|| format_err!("No detached files to load"))?;
match Sysroot::discover(dir, &config.extra_env) {
Ok(it) => Some(it),
Err(e) => {
tracing::error!(
%e,
"Failed to find sysroot for {}. Is rust-src installed?",
dir.display()
);
None
}
}
Sysroot::discover(dir, &config.extra_env).map_err(|e| {
Some(format!(
"Failed to find sysroot for {}. Is rust-src installed? {e}",
dir.display()
))
})
}
None => None,
None => Err(None),
};
if let Some(sysroot) = &sysroot {
if let Ok(sysroot) = &sysroot {
tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
}
let rustc_cfg = rustc_cfg::get(None, None, &Default::default());
@ -450,10 +432,18 @@ impl ProjectWorkspace {
}
}
pub fn workspace_definition_path(&self) -> Option<&AbsPath> {
match self {
ProjectWorkspace::Cargo { cargo, .. } => Some(cargo.workspace_root()),
ProjectWorkspace::Json { project, .. } => Some(project.path()),
ProjectWorkspace::DetachedFiles { .. } => None,
}
}
pub fn find_sysroot_proc_macro_srv(&self) -> Option<AbsPathBuf> {
match self {
ProjectWorkspace::Cargo { sysroot: Some(sysroot), .. }
| ProjectWorkspace::Json { sysroot: Some(sysroot), .. } => {
ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. }
| ProjectWorkspace::Json { sysroot: Ok(sysroot), .. } => {
let standalone_server_name =
format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
["libexec", "lib"]
@ -469,7 +459,7 @@ impl ProjectWorkspace {
/// The return type contains the path and whether or not
/// the root is a member of the current workspace
pub fn to_roots(&self) -> Vec<PackageRoot> {
let mk_sysroot = |sysroot: Option<&Sysroot>, project_root: Option<&AbsPath>| {
let mk_sysroot = |sysroot: Result<&Sysroot, _>, project_root: Option<&AbsPath>| {
sysroot.map(|sysroot| PackageRoot {
// mark the sysroot as mutable if it is located inside of the project
is_local: project_root
@ -592,7 +582,7 @@ impl ProjectWorkspace {
load_proc_macro,
load,
project,
sysroot.as_ref(),
sysroot.as_ref().ok(),
extra_env,
Err("rust-project.json projects have no target layout set".into()),
),
@ -608,9 +598,9 @@ impl ProjectWorkspace {
} => cargo_to_crate_graph(
load_proc_macro,
load,
rustc,
rustc.as_ref().ok(),
cargo,
sysroot.as_ref(),
sysroot.as_ref().ok(),
rustc_cfg.clone(),
cfg_overrides,
build_scripts,
@ -624,7 +614,7 @@ impl ProjectWorkspace {
rustc_cfg.clone(),
load,
files,
sysroot,
sysroot.as_ref().ok(),
Err("detached file projects have no target layout set".into()),
)
}
@ -786,7 +776,7 @@ fn project_json_to_crate_graph(
fn cargo_to_crate_graph(
load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
rustc: &Option<(CargoWorkspace, WorkspaceBuildScripts)>,
rustc: Option<&(CargoWorkspace, WorkspaceBuildScripts)>,
cargo: &CargoWorkspace,
sysroot: Option<&Sysroot>,
rustc_cfg: Vec<CfgFlag>,
@ -932,7 +922,7 @@ fn cargo_to_crate_graph(
if has_private {
// If the user provided a path to rustc sources, we add all the rustc_private crates
// and create dependencies on them for the crates which opt-in to that
if let Some((rustc_workspace, build_scripts)) = rustc {
if let Some((rustc_workspace, rustc_build_scripts)) = rustc {
handle_rustc_crates(
&mut crate_graph,
&mut pkg_to_lib_crate,
@ -945,7 +935,13 @@ fn cargo_to_crate_graph(
&pkg_crates,
&cfg_options,
override_cfg,
build_scripts,
if rustc_workspace.workspace_root() == cargo.workspace_root() {
// the rustc workspace does not use the installed toolchain's proc-macro server,
// so we need to make sure we don't use the pre-compiled proc-macros there either
build_scripts
} else {
rustc_build_scripts
},
target_layout,
);
}
@ -957,7 +953,7 @@ fn detached_files_to_crate_graph(
rustc_cfg: Vec<CfgFlag>,
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
detached_files: &[AbsPathBuf],
sysroot: &Option<Sysroot>,
sysroot: Option<&Sysroot>,
target_layout: TargetLayoutLoadResult,
) -> CrateGraph {
let _p = profile::span("detached_files_to_crate_graph");
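A side note on the parameter flip from `&Option<Sysroot>` to `Option<&Sysroot>` above: the new shape composes directly with the `Result`-typed sysroot via `.as_ref().ok()`, as the call sites now do. A standalone illustration with `String` in place of `Sysroot`:
fn takes_opt_ref(sysroot: Option<&String>) -> usize {
    sysroot.map_or(0, |s| s.len())
}
fn main() {
    let sysroot: Result<String, Option<String>> = Ok("/usr".to_owned());
    // Result<T, E> -> Option<&T> without cloning or moving:
    assert_eq!(takes_opt_ref(sysroot.as_ref().ok()), 4);
}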

View file

@ -7,7 +7,7 @@ use std::{
};
use hir::{
db::{AstDatabase, DefDatabase, HirDatabase},
db::{DefDatabase, ExpandDatabase, HirDatabase},
AssocItem, Crate, Function, HasSource, HirDisplay, ModuleDef,
};
use hir_def::{
@ -24,7 +24,7 @@ use ide_db::base_db::{
use itertools::Itertools;
use oorandom::Rand32;
use profile::{Bytes, StopWatch};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use stdx::format_to;
@ -57,7 +57,7 @@ impl flags::AnalysisStats {
let mut cargo_config = CargoConfig::default();
cargo_config.sysroot = match self.no_sysroot {
true => None,
false => Some(RustcSource::Discover),
false => Some(RustLibSource::Discover),
};
let no_progress = &|_| ();

View file

@ -1,7 +1,7 @@
//! Analyze all modules in a project for diagnostics. Exits with a non-zero
//! status code if any errors are found.
use project_model::{CargoConfig, RustcSource};
use project_model::{CargoConfig, RustLibSource};
use rustc_hash::FxHashSet;
use hir::{db::HirDatabase, Crate, Module};
@ -16,7 +16,7 @@ use crate::cli::{
impl flags::Diagnostics {
pub fn run(self) -> anyhow::Result<()> {
let mut cargo_config = CargoConfig::default();
cargo_config.sysroot = Some(RustcSource::Discover);
cargo_config.sysroot = Some(RustLibSource::Discover);
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: !self.disable_build_scripts,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,

View file

@ -13,7 +13,7 @@ use ide_db::LineIndexDatabase;
use ide_db::base_db::salsa::{self, ParallelDatabase};
use ide_db::line_index::WideEncoding;
use lsp_types::{self, lsif};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use vfs::{AbsPathBuf, Vfs};
use crate::cli::load_cargo::ProcMacroServerChoice;
@ -290,7 +290,7 @@ impl flags::Lsif {
eprintln!("Generating LSIF started...");
let now = Instant::now();
let mut cargo_config = CargoConfig::default();
cargo_config.sysroot = Some(RustcSource::Discover);
cargo_config.sysroot = Some(RustLibSource::Discover);
let no_progress = &|_| ();
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,

View file

@ -15,7 +15,7 @@ use ide::{
TokenStaticData,
};
use ide_db::LineIndexDatabase;
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use scip::types as scip_types;
use std::env;
@ -30,7 +30,7 @@ impl flags::Scip {
eprintln!("Generating SCIP start...");
let now = Instant::now();
let mut cargo_config = CargoConfig::default();
cargo_config.sysroot = Some(RustcSource::Discover);
cargo_config.sysroot = Some(RustLibSource::Discover);
let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
let load_cargo_config = LoadCargoConfig {

View file

@ -1,7 +1,7 @@
//! Applies structured search replace rules from the command line.
use ide_ssr::MatchFinder;
use project_model::{CargoConfig, RustcSource};
use project_model::{CargoConfig, RustLibSource};
use crate::cli::{
flags,
@ -13,7 +13,7 @@ impl flags::Ssr {
pub fn run(self) -> Result<()> {
use ide_db::base_db::SourceDatabaseExt;
let mut cargo_config = CargoConfig::default();
cargo_config.sysroot = Some(RustcSource::Discover);
cargo_config.sysroot = Some(RustLibSource::Discover);
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,

View file

@ -22,7 +22,7 @@ use ide_db::{
use itertools::Itertools;
use lsp_types::{ClientCapabilities, MarkupKind};
use project_model::{
CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource,
CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustLibSource,
UnsetTestCrates,
};
use rustc_hash::{FxHashMap, FxHashSet};
@ -272,7 +272,6 @@ config_data! {
/// The warnings will be indicated by a blue squiggly underline in code
/// and a blue icon in the `Problems Panel`.
diagnostics_warningsAsInfo: Vec<String> = "[]",
/// These directories will be ignored by rust-analyzer. They are
/// relative to the workspace root, and globs are not supported. You may
/// also need to add the folders to Code's `files.watcherExclude`.
@ -895,6 +894,15 @@ impl Config {
}
}
pub fn add_linked_projects(&mut self, linked_projects: Vec<ProjectJsonData>) {
let mut linked_projects = linked_projects
.into_iter()
.map(ManifestOrProjectJson::ProjectJson)
.collect::<Vec<ManifestOrProjectJson>>();
self.data.linkedProjects.append(&mut linked_projects);
}
pub fn did_save_text_document_dynamic_registration(&self) -> bool {
let caps = try_or_def!(self.caps.text_document.as_ref()?.synchronization.clone()?);
caps.did_save == Some(true) && caps.dynamic_registration == Some(true)
@ -1129,16 +1137,16 @@ impl Config {
pub fn cargo(&self) -> CargoConfig {
let rustc_source = self.data.rustc_source.as_ref().map(|rustc_src| {
if rustc_src == "discover" {
RustcSource::Discover
RustLibSource::Discover
} else {
RustcSource::Path(self.root_path.join(rustc_src))
RustLibSource::Path(self.root_path.join(rustc_src))
}
});
let sysroot = self.data.cargo_sysroot.as_ref().map(|sysroot| {
if sysroot == "discover" {
RustcSource::Discover
RustLibSource::Discover
} else {
RustcSource::Path(self.root_path.join(sysroot))
RustLibSource::Path(self.root_path.join(sysroot))
}
});
let sysroot_src =

View file

@ -87,6 +87,42 @@ impl<'a> RequestDispatcher<'a> {
self
}
/// Dispatches the request onto thread pool
pub(crate) fn on_no_retry<R>(
&mut self,
f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
R::Result: Serialize,
{
let (req, params, panic_context) = match self.parse::<R>() {
Some(it) => it,
None => return self,
};
self.global_state.task_pool.handle.spawn({
let world = self.global_state.snapshot();
move || {
let result = panic::catch_unwind(move || {
let _pctx = stdx::panic_context::enter(panic_context);
f(world, params)
});
match thread_result_to_response::<R>(req.id.clone(), result) {
Ok(response) => Task::Response(response),
Err(_) => Task::Response(lsp_server::Response::new_err(
req.id,
lsp_server::ErrorCode::ContentModified as i32,
"content modified".to_string(),
)),
}
}
});
self
}
/// Dispatches the request onto thread pool
pub(crate) fn on<R>(
&mut self,

View file

@ -29,7 +29,7 @@ use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
use serde_json::json;
use stdx::{format_to, never};
use syntax::{algo, ast, AstNode, TextRange, TextSize};
use vfs::AbsPathBuf;
use vfs::{AbsPath, AbsPathBuf};
use crate::{
cargo_target_spec::CargoTargetSpec,
@ -46,6 +46,7 @@ use crate::{
pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> {
state.proc_macro_clients.clear();
state.proc_macro_changed = false;
state.fetch_workspaces_queue.request_op("reload workspace request".to_string());
state.fetch_build_data_queue.request_op("reload workspace request".to_string());
Ok(())
@ -84,6 +85,15 @@ pub(crate) fn handle_analyzer_status(
snap.workspaces.len(),
if snap.workspaces.len() == 1 { "" } else { "s" }
);
format_to!(
buf,
"Workspace root folders: {:?}",
snap.workspaces
.iter()
.flat_map(|ws| ws.workspace_definition_path())
.collect::<Vec<&AbsPath>>()
);
}
buf.push_str("\nAnalysis:\n");
buf.push_str(

View file

@ -36,11 +36,41 @@ impl Progress {
}
impl GlobalState {
pub(crate) fn show_message(&mut self, typ: lsp_types::MessageType, message: String) {
let message = message;
self.send_notification::<lsp_types::notification::ShowMessage>(
lsp_types::ShowMessageParams { typ, message },
)
pub(crate) fn show_message(
&mut self,
typ: lsp_types::MessageType,
message: String,
show_open_log_button: bool,
) {
match self.config.open_server_logs() && show_open_log_button {
true => self.send_request::<lsp_types::request::ShowMessageRequest>(
lsp_types::ShowMessageRequestParams {
typ,
message,
actions: Some(vec![lsp_types::MessageActionItem {
title: "Open server logs".to_owned(),
properties: Default::default(),
}]),
},
|this, resp| {
let lsp_server::Response { error: None, result: Some(result), .. } = resp
else { return };
if let Ok(Some(_item)) = crate::from_json::<
<lsp_types::request::ShowMessageRequest as lsp_types::request::Request>::Result,
>(
lsp_types::request::ShowMessageRequest::METHOD, &result
) {
this.send_notification::<lsp_ext::OpenServerLogs>(());
}
},
),
false => self.send_notification::<lsp_types::notification::ShowMessage>(
lsp_types::ShowMessageParams {
typ,
message,
},
),
}
}
/// Sends a notification to the client containing the error `message`.
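A standalone analogue of the new `show_message` contract (names are illustrative, not the LSP types used above): the boolean decides whether the message goes out as a request carrying an "Open server logs" action or as a plain notification.
fn show_message(message: &str, show_open_log_button: bool) {
    if show_open_log_button {
        println!("{message} [Open server logs]"); // stands in for ShowMessageRequest
    } else {
        println!("{message}"); // stands in for the ShowMessage notification
    }
}
fn main() {
    show_message("Failed to run build scripts of some packages.", true);
    show_message("Workspace loaded.", false);
}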

View file

@ -406,9 +406,19 @@ impl GlobalState {
if self.config.server_status_notification() {
self.send_notification::<lsp_ext::ServerStatusNotification>(status);
} else if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message)
{
self.show_and_log_error(message.clone(), None);
} else if let (health, Some(message)) = (status.health, &status.message) {
let open_log_button = tracing::enabled!(tracing::Level::ERROR)
&& (self.fetch_build_data_error().is_err()
|| self.fetch_workspace_error().is_err());
self.show_message(
match health {
lsp_ext::Health::Ok => lsp_types::MessageType::INFO,
lsp_ext::Health::Warning => lsp_types::MessageType::WARNING,
lsp_ext::Health::Error => lsp_types::MessageType::ERROR,
},
message.clone(),
open_log_button,
);
}
}
}
@ -653,7 +663,7 @@ impl GlobalState {
.on::<lsp_types::request::GotoDeclaration>(handlers::handle_goto_declaration)
.on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
.on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
.on::<lsp_types::request::InlayHintRequest>(handlers::handle_inlay_hints)
.on_no_retry::<lsp_types::request::InlayHintRequest>(handlers::handle_inlay_hints)
.on::<lsp_types::request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
.on::<lsp_types::request::Completion>(handlers::handle_completion)
.on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
@ -919,6 +929,7 @@ impl GlobalState {
this.show_message(
lsp_types::MessageType::WARNING,
error.to_string(),
false,
);
}
this.update_configuration(config);

View file

@ -90,38 +90,55 @@ impl GlobalState {
quiescent: self.is_quiescent(),
message: None,
};
let mut message = String::new();
if self.proc_macro_changed {
status.health = lsp_ext::Health::Warning;
status.message =
Some("Reload required due to source changes of a procedural macro.".into())
message.push_str("Reload required due to source changes of a procedural macro.\n\n");
}
if let Err(_) = self.fetch_build_data_error() {
status.health = lsp_ext::Health::Warning;
status.message =
Some("Failed to run build scripts of some packages, check the logs.".to_string());
message.push_str("Failed to run build scripts of some packages.\n\n");
}
if !self.config.cargo_autoreload()
&& self.is_quiescent()
&& self.fetch_workspaces_queue.op_requested()
{
status.health = lsp_ext::Health::Warning;
status.message = Some("Workspace reload required".to_string())
message.push_str("Auto-reloading is disabled and the workspace has changed, a manual workspace reload is required.\n\n");
}
if let Err(_) = self.fetch_workspace_error() {
status.health = lsp_ext::Health::Error;
status.message = Some("Failed to load workspaces".to_string())
}
if self.config.linked_projects().is_empty()
&& self.config.detached_files().is_empty()
&& self.config.notifications().cargo_toml_not_found
{
status.health = lsp_ext::Health::Warning;
status.message = Some("Failed to discover workspace".to_string())
message.push_str("Failed to discover workspace.\n\n");
}
for ws in self.workspaces.iter() {
let (ProjectWorkspace::Cargo { sysroot, .. }
| ProjectWorkspace::Json { sysroot, .. }
| ProjectWorkspace::DetachedFiles { sysroot, .. }) = ws;
if let Err(Some(e)) = sysroot {
status.health = lsp_ext::Health::Warning;
message.push_str(e);
message.push_str("\n\n");
}
if let ProjectWorkspace::Cargo { rustc: Err(Some(e)), .. } = ws {
status.health = lsp_ext::Health::Warning;
message.push_str(e);
message.push_str("\n\n");
}
}
if let Err(_) = self.fetch_workspace_error() {
status.health = lsp_ext::Health::Error;
message.push_str("Failed to load workspaces.\n\n");
}
if !message.is_empty() {
status.message = Some(message.trim_end().to_owned());
}
status
}
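The `let (ProjectWorkspace::Cargo { sysroot, .. } | ...) = ws;` destructuring above uses an irrefutable or-pattern in a `let`, binding the same field name from every variant. A minimal standalone analogue:
enum Workspace {
    Cargo { sysroot: i32 },
    Json { sysroot: i32 },
}
fn sysroot_of(ws: &Workspace) -> i32 {
    // Irrefutable because the or-pattern covers every variant.
    let (Workspace::Cargo { sysroot } | Workspace::Json { sysroot }) = ws;
    *sysroot
}
fn main() {
    assert_eq!(sysroot_of(&Workspace::Json { sysroot: 7 }), 7);
}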

View file

@ -48,23 +48,30 @@ impl From<ast::IfExpr> for ElseBranch {
}
impl ast::IfExpr {
pub fn condition(&self) -> Option<ast::Expr> {
// If the first child expression is a BlockExpr, check whether the then-body is missing.
// If it is, the block is the then-branch and it is the condition that is missing.
let mut exprs = support::children(self.syntax());
let first = exprs.next();
match first {
Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first),
first => first,
}
}
pub fn then_branch(&self) -> Option<ast::BlockExpr> {
self.children_after_condition().next()
match support::children(self.syntax()).nth(1)? {
ast::Expr::BlockExpr(block) => Some(block),
_ => None,
}
}
pub fn else_branch(&self) -> Option<ElseBranch> {
let res = match self.children_after_condition().nth(1) {
Some(block) => ElseBranch::Block(block),
None => {
let elif = self.children_after_condition().next()?;
ElseBranch::IfExpr(elif)
}
};
Some(res)
}
fn children_after_condition<N: AstNode>(&self) -> impl Iterator<Item = N> {
self.syntax().children().skip(1).filter_map(N::cast)
match support::children(self.syntax()).nth(2)? {
ast::Expr::BlockExpr(block) => Some(ElseBranch::Block(block)),
ast::Expr::IfExpr(elif) => Some(ElseBranch::IfExpr(elif)),
_ => None,
}
}
}
@ -356,7 +363,15 @@ impl ast::BlockExpr {
Some(it) => it,
None => return true,
};
!matches!(parent.kind(), FN | IF_EXPR | WHILE_EXPR | LOOP_EXPR)
match parent.kind() {
FOR_EXPR | IF_EXPR => parent
.children()
.filter(|it| ast::Expr::can_cast(it.kind()))
.next()
.map_or(true, |it| it == *self.syntax()),
LET_ELSE | FN | WHILE_EXPR | LOOP_EXPR | CONST_BLOCK_PAT => false,
_ => true,
}
}
}
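The rework above makes the `if`-expression children purely positional. A sketch against the in-tree `syntax` crate API as used elsewhere in this diff (the helper and inputs are illustrative, not part of the commit):
use syntax::{ast, AstNode, SourceFile};
fn first_if_condition(src: &str) -> Option<String> {
    let file = SourceFile::parse(src).tree();
    let if_expr = file.syntax().descendants().find_map(ast::IfExpr::cast)?;
    if_expr.condition().map(|c| c.syntax().text().to_string())
}
fn main() {
    // Ordinary `if`: the first child expression is the condition.
    assert_eq!(first_if_condition("fn f() { if a == b { } }"), Some("a == b".into()));
    // Only a lone block child: it is the then-branch, so there is no condition.
    assert_eq!(first_if_condition("fn f() { if { } }"), None);
}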

View file

@ -937,12 +937,6 @@ impl From<ast::Adt> for ast::Item {
}
}
impl ast::IfExpr {
pub fn condition(&self) -> Option<ast::Expr> {
support::child(&self.syntax)
}
}
impl ast::MatchGuard {
pub fn condition(&self) -> Option<ast::Expr> {
support::child(&self.syntax)

View file

@ -44,6 +44,8 @@
//! try: infallible
//! unsize: sized
#![rustc_coherence_is_core]
pub mod marker {
// region:sized
#[lang = "sized"]

View file

@ -199,6 +199,11 @@
"title": "Reload workspace",
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.addProject",
"title": "Add current file's crate to workspace",
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.reload",
"title": "Restart server",
@ -428,6 +433,17 @@
"default": false,
"type": "boolean"
},
"rust-analyzer.discoverProjectCommand": {
"markdownDescription": "Sets the command that rust-analyzer uses to generate `rust-project.json` files. This command should only be used\n if a build system like Buck or Bazel is also in use. The command must accept files as arguments and return \n a rust-project.json over stdout.",
"default": null,
"type": [
"null",
"array"
],
"items": {
"type": "string"
}
},
"$generated-start": {},
"rust-analyzer.assist.emitMustUse": {
"markdownDescription": "Whether to insert #[must_use] when generating `as_` methods\nfor enum variants.",

View file

@ -6,7 +6,7 @@ import * as Is from "vscode-languageclient/lib/common/utils/is";
import { assert } from "./util";
import * as diagnostics from "./diagnostics";
import { WorkspaceEdit } from "vscode";
import { Config, substituteVSCodeVariables } from "./config";
import { Config, prepareVSCodeConfig } from "./config";
import { randomUUID } from "crypto";
export interface Env {
@ -95,7 +95,16 @@ export async function createClient(
const resp = await next(params, token);
if (resp && Array.isArray(resp)) {
return resp.map((val) => {
return substituteVSCodeVariables(val);
return prepareVSCodeConfig(val, (key, cfg) => {
// we only want to set discovered workspaces on the right key
// and if a workspace has been discovered.
if (
key === "linkedProjects" &&
config.discoveredWorkspaces.length > 0
) {
cfg[key] = config.discoveredWorkspaces;
}
});
});
} else {
return resp;

View file

@ -3,7 +3,7 @@ import * as lc from "vscode-languageclient";
import * as ra from "./lsp_ext";
import * as path from "path";
import { Ctx, Cmd, CtxInit } from "./ctx";
import { Ctx, Cmd, CtxInit, discoverWorkspace } from "./ctx";
import { applySnippetWorkspaceEdit, applySnippetTextEdits } from "./snippets";
import { spawnSync } from "child_process";
import { RunnableQuickPick, selectRunnable, createTask, createArgs } from "./run";
@ -749,6 +749,33 @@ export function reloadWorkspace(ctx: CtxInit): Cmd {
return async () => ctx.client.sendRequest(ra.reloadWorkspace);
}
export function addProject(ctx: CtxInit): Cmd {
return async () => {
const discoverProjectCommand = ctx.config.discoverProjectCommand;
if (!discoverProjectCommand) {
return;
}
const workspaces: JsonProject[] = await Promise.all(
vscode.workspace.workspaceFolders!.map(async (folder): Promise<JsonProject> => {
const rustDocuments = vscode.workspace.textDocuments.filter(isRustDocument);
return discoverWorkspace(rustDocuments, discoverProjectCommand, {
cwd: folder.uri.fsPath,
});
})
);
ctx.addToDiscoveredWorkspaces(workspaces);
// this is a workaround to avoid having to write the `rust-project.json` into
// a workspace-level VS Code-specific settings folder. We'd like to keep the
// `rust-project.json` entirely in-memory.
await ctx.client?.sendNotification(lc.DidChangeConfigurationNotification.type, {
settings: "",
});
};
}
async function showReferencesImpl(
client: LanguageClient | undefined,
uri: string,

Some files were not shown because too many files have changed in this diff.