mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-11-10 07:04:22 +00:00

Merge commit '7219414e81810fd4d967136c4a0650523892c157' into sync-from-ra

parent 858f4aca6c
commit f43cea0878

78 changed files with 1391 additions and 2797 deletions

Cargo.lock (generated, 91 lines changed)

@@ -166,7 +166,7 @@ checksum = "5676cea088c32290fe65c82895be9d06dd21e0fa49bb97ca840529e9417ab71a"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
  "synstructure",
 ]

@@ -312,6 +312,17 @@ dependencies = [
  "parking_lot_core",
 ]

+[[package]]
+name = "derivative"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
 [[package]]
 name = "derive_arbitrary"
 version = "1.3.2"
@@ -320,7 +331,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]

 [[package]]
@@ -582,6 +593,7 @@ dependencies = [
  "project-model",
  "ra-ap-rustc_abi",
  "ra-ap-rustc_index",
+ "ra-ap-rustc_pattern_analysis",
  "rustc-hash",
  "scoped-tls",
  "smallvec",
@@ -1407,20 +1419,20 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_abi"
-version = "0.21.0"
+version = "0.33.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7816f980fab89e878ff2e916e2077d484e3aa1c619a3cc982c8a417c3dfe45fa"
+checksum = "8ce9100fc66c6c60aeeb076868ead9c2eaa65d6a5a90404f08c242327a92ff4b"
 dependencies = [
- "bitflags 1.3.2",
+ "bitflags 2.4.1",
  "ra-ap-rustc_index",
  "tracing",
 ]

 [[package]]
 name = "ra-ap-rustc_index"
-version = "0.21.0"
+version = "0.33.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8352918d61aa4afab9f2ed7314cf638976b20949b3d61d2f468c975b0d251f24"
+checksum = "5e5313d7f243b63ef9e58d94355b11aa8499f1328055f1f58adf0a5ea7d2faca"
 dependencies = [
  "arrayvec",
  "ra-ap-rustc_index_macros",
@@ -1429,21 +1441,21 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_index_macros"
-version = "0.21.0"
+version = "0.33.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66a9424018828155a3e3596515598f90e68427d8f35eff6df7f0856c73fc58a8"
+checksum = "a83108ebf3e73dde205b9c25706209bcd7736480820f90ded28eabaf8b469f25"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
  "synstructure",
 ]

 [[package]]
 name = "ra-ap-rustc_lexer"
-version = "0.21.0"
+version = "0.33.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc741c7a78103efab416b562e35bd73c8d4967478575010c86c6062f8d3cbf29"
+checksum = "d2d221356e5717595e8a0afa5fba1620dcb4032ab784dc4d98fdc7284e3feb66"
 dependencies = [
  "unicode-properties",
  "unicode-xid",
@@ -1451,14 +1463,28 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_parse_format"
-version = "0.21.0"
+version = "0.33.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d557201d71792487bd2bab637ab5be9aa6fff59b88e25e12de180b0f9d2df60f"
+checksum = "ab62fc925612374103b4f178da347b535b35d9eb1ff5ba42105c990b2e25a164"
 dependencies = [
  "ra-ap-rustc_index",
  "ra-ap-rustc_lexer",
 ]

+[[package]]
+name = "ra-ap-rustc_pattern_analysis"
+version = "0.33.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c4085e0c771fd4b883930b599ef42966b855762bbe4052c17673b3253421a6d"
+dependencies = [
+ "derivative",
+ "ra-ap-rustc_index",
+ "rustc-hash",
+ "rustc_apfloat",
+ "smallvec",
+ "tracing",
+]
+
 [[package]]
 name = "rayon"
 version = "1.8.0"
@@ -1593,7 +1619,7 @@ dependencies = [
 "heck",
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]

 [[package]]
@@ -1608,6 +1634,16 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"

+[[package]]
+name = "rustc_apfloat"
+version = "0.2.0+llvm-462a31f5a5ab"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "465187772033a5ee566f69fe008df03628fce549a0899aae76f0a0c2e34696be"
+dependencies = [
+ "bitflags 1.3.2",
+ "smallvec",
+]
+
 [[package]]
 name = "ryu"
 version = "1.0.13"
@@ -1670,7 +1706,7 @@ checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]

 [[package]]
@@ -1693,7 +1729,7 @@ checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]

 [[package]]
@@ -1707,9 +1743,9 @@ dependencies = [

 [[package]]
 name = "smallvec"
-version = "1.10.0"
+version = "1.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
+checksum = "2593d31f82ead8df961d8bd23a64c2ccf2eb5dd34b0a34bfb4dd54011c72009e"

 [[package]]
 name = "smol_str"
@@ -1770,6 +1806,17 @@ dependencies = [
  "winapi",
 ]

+[[package]]
+name = "syn"
+version = "1.0.109"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
 [[package]]
 name = "syn"
 version = "2.0.39"
@@ -1789,7 +1836,7 @@ checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
  "unicode-xid",
 ]
@@ -1876,7 +1923,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]

 [[package]]
@@ -1977,7 +2024,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.39",
 ]

 [[package]]

@@ -79,10 +79,11 @@ tt = { path = "./crates/tt", version = "0.0.0" }
 vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
 vfs = { path = "./crates/vfs", version = "0.0.0" }

-ra-ap-rustc_lexer = { version = "0.21.0", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.21.0", default-features = false }
-ra-ap-rustc_index = { version = "0.21.0", default-features = false }
-ra-ap-rustc_abi = { version = "0.21.0", default-features = false }
+ra-ap-rustc_lexer = { version = "0.33.0", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.33.0", default-features = false }
+ra-ap-rustc_index = { version = "0.33.0", default-features = false }
+ra-ap-rustc_abi = { version = "0.33.0", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.33.0", default-features = false }

 # local crates that aren't published to crates.io. These should not have versions.
 sourcegen = { path = "./crates/sourcegen" }

@@ -2,8 +2,8 @@

 #![warn(rust_2018_idioms, unused_lifetimes)]

-mod input;
 mod change;
+mod input;

 use std::panic;

@@ -18,28 +18,6 @@ pub enum CfgAtom {
     KeyValue { key: SmolStr, value: SmolStr },
 }

-impl CfgAtom {
-    /// Returns `true` when the atom comes from the target specification.
-    ///
-    /// If this returns `true`, then changing this atom requires changing the compilation target. If
-    /// it returns `false`, the atom might come from a build script or the build system.
-    pub fn is_target_defined(&self) -> bool {
-        match self {
-            CfgAtom::Flag(flag) => matches!(&**flag, "unix" | "windows"),
-            CfgAtom::KeyValue { key, value: _ } => matches!(
-                &**key,
-                "target_arch"
-                    | "target_os"
-                    | "target_env"
-                    | "target_family"
-                    | "target_endian"
-                    | "target_pointer_width"
-                    | "target_vendor" // NOTE: `target_feature` is left out since it can be configured via `-Ctarget-feature`
-            ),
-        }
-    }
-}
-
 impl fmt::Display for CfgAtom {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {

@@ -131,11 +131,9 @@ impl CfgDiff {
     /// of both.
     pub fn new(enable: Vec<CfgAtom>, disable: Vec<CfgAtom>) -> Option<CfgDiff> {
         let mut occupied = FxHashSet::default();
-        for item in enable.iter().chain(disable.iter()) {
-            if !occupied.insert(item) {
-                // was present
-                return None;
-            }
+        if enable.iter().chain(disable.iter()).any(|item| !occupied.insert(item)) {
+            // was present
+            return None;
         }

         Some(CfgDiff { enable, disable })

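The rewritten `CfgDiff::new` leans on a standard library idiom: `HashSet::insert` returns `false` when the value was already present, so chaining both lists through `any` rejects any atom that occurs twice or shows up in both the enable and disable sets. A minimal standalone sketch of the same check (the function and names here are illustrative, not taken from the repository):

```rust
use std::collections::HashSet;

// Returns true when no item repeats within or across the two slices,
// mirroring the invariant CfgDiff::new enforces before returning Some(..).
fn disjoint_and_unique<T: std::hash::Hash + Eq>(enable: &[T], disable: &[T]) -> bool {
    let mut seen = HashSet::new();
    // `insert` returns false on a duplicate, so `any` short-circuits there.
    !enable.iter().chain(disable.iter()).any(|item| !seen.insert(item))
}

fn main() {
    assert!(disjoint_and_unique(&["unix"], &["windows"]));
    assert!(!disjoint_and_unique(&["unix"], &["unix"])); // same atom on both sides
}
```
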
@@ -32,6 +32,7 @@ use crate::{
     VariantId,
 };

+/// Desugared attributes of an item post `cfg_attr` expansion.
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub struct Attrs(RawAttrs);

@@ -228,7 +229,6 @@ pub enum DocAtom {
     KeyValue { key: SmolStr, value: SmolStr },
 }

-
 // Adapted from `CfgExpr` parsing code
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum DocExpr {
     Invalid,

@@ -448,10 +448,7 @@ impl AttrsWithOwner {
             let map = db.fields_attrs_source_map(id.parent);
             let file_id = id.parent.file_id(db);
             let root = db.parse_or_expand(file_id);
-            let owner = match &map[id.local_id] {
-                Either::Left(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
-                Either::Right(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
-            };
+            let owner = ast::AnyHasAttrs::new(map[id.local_id].to_node(&root));
             InFile::new(file_id, owner)
         }
         AttrDefId::AdtId(adt) => match adt {

@@ -634,7 +631,7 @@ fn attrs_from_item_tree_assoc<'db, N: ItemTreeModItemNode>(
 pub(crate) fn fields_attrs_source_map(
     db: &dyn DefDatabase,
     def: VariantId,
-) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>> {
+) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>> {
     let mut res = ArenaMap::default();
     let child_source = def.child_source(db);

@@ -643,7 +640,7 @@ pub(crate) fn fields_attrs_source_map(
             idx,
             variant
                 .as_ref()
-                .either(|l| Either::Left(AstPtr::new(l)), |r| Either::Right(AstPtr::new(r))),
+                .either(|l| AstPtr::new(l).wrap_left(), |r| AstPtr::new(r).wrap_right()),
         );
     }

@@ -1,10 +1,10 @@
 //! Defines `Body`: a lowered representation of bodies of functions, statics and
 //! consts.
 mod lower;
+mod pretty;
+pub mod scope;
 #[cfg(test)]
 mod tests;
-pub mod scope;
-mod pretty;

 use std::ops::Index;

@@ -1335,6 +1335,7 @@ impl ExprCollector<'_> {
                 let args = record_pat_field_list
                     .fields()
                     .filter_map(|f| {
+                        self.check_cfg(&f)?;
                        let ast_pat = f.pat()?;
                         let pat = self.collect_pat(ast_pat, binding_list);
                         let name = f.field_name()?.as_name();

@@ -194,7 +194,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDatabase> {
     fn fields_attrs_source_map(
         &self,
         def: VariantId,
-    ) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>>;
+    ) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;

     #[salsa::invoke(AttrsWithOwner::attrs_query)]
     fn attrs(&self, def: AttrDefId) -> Attrs;

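This signature change (here and in the matching `attr.rs` hunks above) moves the `Either` from outside the pointer to inside it: instead of storing one of two typed pointers, the map stores a single pointer whose type parameter records both possibilities. A simplified sketch of why this shape is attractive — the types below are hypothetical stand-ins, not rust-analyzer's real definitions:

```rust
use std::marker::PhantomData;

enum Either<L, R> { Left(L), Right(R) }

// Stand-in for an erased pointer into the syntax tree.
struct SyntaxNodePtr;

// Stand-in for rust-analyzer's AstPtr: an erased pointer plus a zero-sized type tag.
struct AstPtr<N> { raw: SyntaxNodePtr, _ty: PhantomData<fn() -> N> }

// Before: every value carries an Either discriminant around two separate pointers,
// and callers must match on it before they can resolve the node.
type Before<L, R> = Either<AstPtr<L>, AstPtr<R>>;

// After: a single pointer; the Either only reappears once the pointer is resolved,
// so call sites like `map[id].to_node(&root)` need no match at all.
type After<L, R> = AstPtr<Either<L, R>>;
```
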
@@ -12,8 +12,8 @@
 //!
 //! See also a neighboring `body` module.

-pub mod type_ref;
 pub mod format_args;
+pub mod type_ref;

 use std::fmt;

@@ -25,13 +25,13 @@ extern crate ra_ap_rustc_abi as rustc_abi;
 pub mod db;

 pub mod attr;
-pub mod path;
 pub mod builtin_type;
-pub mod per_ns;
 pub mod item_scope;
+pub mod path;
+pub mod per_ns;

-pub mod lower;
 pub mod expander;
+pub mod lower;

 pub mod dyn_map;

@@ -46,24 +46,24 @@ pub use self::hir::type_ref;
 pub mod body;
 pub mod resolver;

-mod trace;
 pub mod nameres;
+mod trace;

-pub mod src;
 pub mod child_by_source;
+pub mod src;

-pub mod visibility;
 pub mod find_path;
 pub mod import_map;
+pub mod visibility;

 pub use rustc_abi as layout;
 use triomphe::Arc;

-#[cfg(test)]
-mod test_db;
 #[cfg(test)]
 mod macro_expansion_tests;
 mod pretty;
+#[cfg(test)]
+mod test_db;

 use std::{
     hash::{Hash, Hasher},

@@ -73,7 +73,6 @@ use std::{
 use base_db::{impl_intern_key, salsa, CrateId, Edition};
 use hir_expand::{
     ast_id_map::{AstIdNode, FileAstId},
-    attrs::{Attr, AttrId, AttrInput},
     builtin_attr_macro::BuiltinAttrExpander,
     builtin_derive_macro::BuiltinDeriveExpander,
     builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},

@@ -939,6 +938,15 @@ impl From<AssocItemId> for AttrDefId {
         }
     }
 }
+impl From<VariantId> for AttrDefId {
+    fn from(vid: VariantId) -> Self {
+        match vid {
+            VariantId::EnumVariantId(id) => id.into(),
+            VariantId::StructId(id) => id.into(),
+            VariantId::UnionId(id) => id.into(),
+        }
+    }
+}

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum VariantId {

@@ -1265,60 +1273,6 @@ fn macro_call_as_call_id_with_eager(
     Ok(res)
 }

-fn derive_macro_as_call_id(
-    db: &dyn DefDatabase,
-    item_attr: &AstIdWithPath<ast::Adt>,
-    derive_attr_index: AttrId,
-    derive_pos: u32,
-    call_site: Span,
-    krate: CrateId,
-    resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
-) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
-    let (macro_id, def_id) = resolver(item_attr.path.clone())
-        .filter(|(_, def_id)| def_id.is_derive())
-        .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
-    let call_id = def_id.as_lazy_macro(
-        db.upcast(),
-        krate,
-        MacroCallKind::Derive {
-            ast_id: item_attr.ast_id,
-            derive_index: derive_pos,
-            derive_attr_index,
-        },
-        call_site,
-    );
-    Ok((macro_id, def_id, call_id))
-}
-
-fn attr_macro_as_call_id(
-    db: &dyn DefDatabase,
-    item_attr: &AstIdWithPath<ast::Item>,
-    macro_attr: &Attr,
-    krate: CrateId,
-    def: MacroDefId,
-) -> MacroCallId {
-    let arg = match macro_attr.input.as_deref() {
-        Some(AttrInput::TokenTree(tt)) => {
-            let mut tt = tt.as_ref().clone();
-            tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);
-            Some(tt)
-        }
-
-        _ => None,
-    };
-
-    def.as_lazy_macro(
-        db.upcast(),
-        krate,
-        MacroCallKind::Attr {
-            ast_id: item_attr.ast_id,
-            attr_args: arg.map(Arc::new),
-            invoc_attr_index: macro_attr.id,
-        },
-        macro_attr.span,
-    )
-}
-
 #[derive(Debug)]
 pub struct UnresolvedMacro {
     pub path: hir_expand::mod_path::ModPath,

@@ -1,11 +1,11 @@
 //! Tests specific to declarative macros, aka macros by example. This covers
 //! both stable `macro_rules!` macros as well as unstable `macro` macros.

-mod tt_conversion;
 mod matching;
 mod meta_syntax;
 mod metavar_expr;
 mod regression;
+mod tt_conversion;

 use expect_test::expect;

@@ -9,9 +9,9 @@
 //! write unit-tests (in fact, we used to do that), but that makes tests brittle
 //! and harder to understand.

-mod mbe;
-mod builtin_fn_macro;
 mod builtin_derive_macro;
+mod builtin_fn_macro;
+mod mbe;
 mod proc_macros;

 use std::{iter, ops::Range, sync};

@@ -48,11 +48,11 @@
 //! the result

 pub mod attr_resolution;
-pub mod proc_macro;
-pub mod diagnostics;
 mod collector;
+pub mod diagnostics;
 mod mod_resolution;
 mod path_resolution;
+pub mod proc_macro;

 #[cfg(test)]
 mod tests;

@@ -1,16 +1,21 @@
 //! Post-nameres attribute resolution.

-use hir_expand::{attrs::Attr, MacroCallId};
+use base_db::CrateId;
+use hir_expand::{
+    attrs::{Attr, AttrId, AttrInput},
+    MacroCallId, MacroCallKind, MacroDefId,
+};
+use span::Span;
 use syntax::{ast, SmolStr};
+use triomphe::Arc;

 use crate::{
     attr::builtin::{find_builtin_attr_idx, TOOL_MODULES},
-    attr_macro_as_call_id,
     db::DefDatabase,
     item_scope::BuiltinShadowMode,
     nameres::path_resolution::ResolveMode,
-    path::{ModPath, PathKind},
-    AstIdWithPath, LocalModuleId, UnresolvedMacro,
+    path::{self, ModPath, PathKind},
+    AstIdWithPath, LocalModuleId, MacroId, UnresolvedMacro,
 };

 use super::{DefMap, MacroSubNs};

@@ -93,3 +98,57 @@ impl DefMap {
         false
     }
 }
+
+pub(super) fn attr_macro_as_call_id(
+    db: &dyn DefDatabase,
+    item_attr: &AstIdWithPath<ast::Item>,
+    macro_attr: &Attr,
+    krate: CrateId,
+    def: MacroDefId,
+) -> MacroCallId {
+    let arg = match macro_attr.input.as_deref() {
+        Some(AttrInput::TokenTree(tt)) => {
+            let mut tt = tt.as_ref().clone();
+            tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);
+            Some(tt)
+        }
+
+        _ => None,
+    };
+
+    def.as_lazy_macro(
+        db.upcast(),
+        krate,
+        MacroCallKind::Attr {
+            ast_id: item_attr.ast_id,
+            attr_args: arg.map(Arc::new),
+            invoc_attr_index: macro_attr.id,
+        },
+        macro_attr.span,
+    )
+}
+
+pub(super) fn derive_macro_as_call_id(
+    db: &dyn DefDatabase,
+    item_attr: &AstIdWithPath<ast::Adt>,
+    derive_attr_index: AttrId,
+    derive_pos: u32,
+    call_site: Span,
+    krate: CrateId,
+    resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
+) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
+    let (macro_id, def_id) = resolver(item_attr.path.clone())
+        .filter(|(_, def_id)| def_id.is_derive())
+        .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
+    let call_id = def_id.as_lazy_macro(
+        db.upcast(),
+        krate,
+        MacroCallKind::Derive {
+            ast_id: item_attr.ast_id,
+            derive_index: derive_pos,
+            derive_attr_index,
+        },
+        call_site,
+    );
+    Ok((macro_id, def_id, call_id))
+}

@@ -30,9 +30,7 @@ use triomphe::Arc;

 use crate::{
     attr::Attrs,
-    attr_macro_as_call_id,
     db::DefDatabase,
-    derive_macro_as_call_id,
     item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports},
     item_tree::{
         self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId,

@@ -40,6 +38,7 @@ use crate::{
     },
     macro_call_as_call_id, macro_call_as_call_id_with_eager,
     nameres::{
+        attr_resolution::{attr_macro_as_call_id, derive_macro_as_call_id},
         diagnostics::DefDiagnostic,
         mod_resolution::ModDir,
         path_resolution::ReachedFixedPoint,

@@ -1245,7 +1244,9 @@ impl DefCollector<'_> {
                     MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _), .. }
                     if expander.is_derive()
                 ) {
-                    // Resolved to `#[derive]`
+                    // Resolved to `#[derive]`, we don't actually expand this attribute like
+                    // normal (as that would just be an identity expansion with extra output)
+                    // Instead we treat derive attributes special and apply them separately.

                     let item_tree = tree.item_tree(self.db);
                     let ast_adt_id: FileAstId<ast::Adt> = match *mod_item {

@@ -1284,7 +1285,8 @@ impl DefCollector<'_> {
                 }

                 // We treat the #[derive] macro as an attribute call, but we do not resolve it for nameres collection.
-                // This is just a trick to be able to resolve the input to derives as proper paths.
+                // This is just a trick to be able to resolve the input to derives
+                // as proper paths in `Semantics`.
                 // Check the comment in [`builtin_attr_macro`].
                 let call_id = attr_macro_as_call_id(
                     self.db,

@@ -155,7 +155,14 @@ impl PartialEq for AstIdMap {
 impl Eq for AstIdMap {}

 impl AstIdMap {
-    pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
+    pub(crate) fn ast_id_map(
+        db: &dyn ExpandDatabase,
+        file_id: span::HirFileId,
+    ) -> triomphe::Arc<AstIdMap> {
+        triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
+    }
+
+    fn from_source(node: &SyntaxNode) -> AstIdMap {
         assert!(node.parent().is_none());
         let mut res = AstIdMap::default();

@@ -117,14 +117,10 @@ impl RawAttrs {
             None => return smallvec![attr.clone()],
         };
         let index = attr.id;
-        let attrs =
-            parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
-                let tree = Subtree {
-                    delimiter: tt::Delimiter::invisible_spanned(attr.first()?.first_span()),
-                    token_trees: attr.to_vec(),
-                };
-                Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
-            });
+        let attrs = parts
+            .enumerate()
+            .take(1 << AttrId::CFG_ATTR_BITS)
+            .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));

         let cfg_options = &crate_graph[krate].cfg_options;
         let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };

@@ -222,12 +218,40 @@ impl Attr {
         Some(Attr { id, path, input, span })
     }

-    fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
-        // FIXME: Unecessary roundtrip tt -> ast -> tt
-        let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
-        let ast = ast::Meta::cast(parse.syntax_node())?;
+    fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> {
+        let span = tt.first()?.first_span();
+        let path_end = tt
+            .iter()
+            .position(|tt| {
+                !matches!(
+                    tt,
+                    tt::TokenTree::Leaf(
+                        tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. }) | tt::Leaf::Ident(_),
+                    )
+                )
+            })
+            .unwrap_or_else(|| tt.len());

-        Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id)
+        let (path, input) = tt.split_at(path_end);
+        let path = Interned::new(ModPath::from_tt(db, path)?);
+
+        let input = match input.get(0) {
+            Some(tt::TokenTree::Subtree(tree)) => {
+                Some(Interned::new(AttrInput::TokenTree(Box::new(tree.clone()))))
+            }
+            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))) => {
+                let input = match input.get(1) {
+                    Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text, .. }))) => {
+                        //FIXME the trimming here isn't quite right, raw strings are not handled
+                        Some(Interned::new(AttrInput::Literal(text.trim_matches('"').into())))
+                    }
+                    _ => None,
+                };
+                input
+            }
+            _ => None,
+        };
+        Some(Attr { id, path, input, span })
     }

     pub fn path(&self) -> &ModPath {

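The new `Attr::from_tt` avoids the old token-tree → AST → token-tree roundtrip by splitting the flat token list directly: everything up to the first token that cannot belong to a path (an identifier, `:`, or `$`) is the attribute path, and the remainder is the input. A standalone sketch of that split with simplified stand-in tokens (not the real `tt` types):

```rust
#[derive(Debug, PartialEq)]
enum Tok { Ident(&'static str), Punct(char), Group }

// Split a token list into (path, input) at the first non-path token.
fn split_path(tts: &[Tok]) -> (&[Tok], &[Tok]) {
    let end = tts
        .iter()
        .position(|t| !matches!(t, Tok::Ident(_) | Tok::Punct(':' | '$')))
        .unwrap_or(tts.len());
    tts.split_at(end)
}

fn main() {
    // `serde::rename = "x"` splits into the path `serde::rename` and input `= "x"`.
    let toks = [Tok::Ident("serde"), Tok::Punct(':'), Tok::Punct(':'),
                Tok::Ident("rename"), Tok::Punct('='), Tok::Group];
    let (path, input) = split_path(&toks);
    assert_eq!(path.len(), 4);
    assert_eq!(input.first(), Some(&Tok::Punct('=')));
}
```
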
@@ -277,29 +301,8 @@ impl Attr {
             .token_trees
             .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
             .filter_map(move |tts| {
                 if tts.is_empty() {
                     return None;
                 }
-                // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
-                // here or maybe just parse a mod path from a token tree directly
-                let subtree = tt::Subtree {
-                    delimiter: tt::Delimiter::invisible_spanned(tts.first()?.first_span()),
-                    token_trees: tts.to_vec(),
-                };
-                let (parse, span_map) =
-                    mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
-                let meta = ast::Meta::cast(parse.syntax_node())?;
-                // Only simple paths are allowed.
-                if meta.eq_token().is_some() || meta.expr().is_some() || meta.token_tree().is_some()
-                {
-                    return None;
-                }
-                let path = meta.path()?;
-                let call_site = span_map.span_at(path.syntax().text_range().start());
-                Some((
-                    ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
-                    call_site,
-                ))
+                let span = tts.first()?.first_span();
+                Some((ModPath::from_tt(db, tts)?, span))
             });

         Some(paths)

@@ -48,11 +48,13 @@ impl BuiltinAttrExpander {

 register_builtin! { expand:
     (bench, Bench) => dummy_attr_expand,
+    (cfg, Cfg) => dummy_attr_expand,
+    (cfg_attr, CfgAttr) => dummy_attr_expand,
     (cfg_accessible, CfgAccessible) => dummy_attr_expand,
     (cfg_eval, CfgEval) => dummy_attr_expand,
-    (derive, Derive) => derive_attr_expand,
+    (derive, Derive) => derive_expand,
     // derive const is equivalent to derive for our proposes.
-    (derive_const, DeriveConst) => derive_attr_expand,
+    (derive_const, DeriveConst) => derive_expand,
     (global_allocator, GlobalAllocator) => dummy_attr_expand,
     (test, Test) => dummy_attr_expand,
     (test_case, TestCase) => dummy_attr_expand

@@ -91,7 +93,7 @@ fn dummy_attr_expand(
 /// always resolve as a derive without nameres recollecting them.
 /// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
 /// [`hir::Semantics`] to make this work.
-fn derive_attr_expand(
+fn derive_expand(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
     tt: &tt::Subtree,

@@ -1,16 +1,11 @@
 //! Defines database & queries for macro expansion.

-use std::sync::OnceLock;
-
-use base_db::{
-    salsa::{self, debug::DebugQueryTable},
-    CrateId, Edition, FileId, SourceDatabase, VersionReq,
-};
+use base_db::{salsa, CrateId, FileId, SourceDatabase};
 use either::Either;
 use limit::Limit;
 use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
-use span::{Span, SyntaxContextId};
+use span::SyntaxContextId;
 use syntax::{
     ast::{self, HasAttrs},
     AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,

@@ -19,13 +14,14 @@ use triomphe::Arc;

 use crate::{
     ast_id_map::AstIdMap,
-    attrs::{collect_attrs, RawAttrs},
+    attrs::collect_attrs,
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
+    declarative::DeclarativeMacroExpander,
     fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
     hygiene::{
-        apply_mark, span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
-        SyntaxContextData, Transparency,
+        span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
+        SyntaxContextData,
     },
     proc_macro::ProcMacros,
     span_map::{RealSpanMap, SpanMap, SpanMapRef},

@@ -43,82 +39,6 @@ use crate::{
 /// Actual max for `analysis-stats .` at some point: 30672.
 static TOKEN_LIMIT: Limit = Limit::new(1_048_576);

-#[derive(Debug, Clone, Eq, PartialEq)]
-/// Old-style `macro_rules` or the new macros 2.0
-pub struct DeclarativeMacroExpander {
-    pub mac: mbe::DeclarativeMacro<span::Span>,
-    pub transparency: Transparency,
-}
-
-// FIXME: Remove this once we drop support for 1.76
-static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();
-
-impl DeclarativeMacroExpander {
-    pub fn expand(
-        &self,
-        db: &dyn ExpandDatabase,
-        tt: tt::Subtree,
-        call_id: MacroCallId,
-    ) -> ExpandResult<tt::Subtree> {
-        let loc = db.lookup_intern_macro_call(call_id);
-        let toolchain = &db.crate_graph()[loc.def.krate].toolchain;
-        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-                &base_db::Version {
-                    pre: base_db::Prerelease::EMPTY,
-                    build: base_db::BuildMetadata::EMPTY,
-                    major: version.major,
-                    minor: version.minor,
-                    patch: version.patch,
-                },
-            )
-        });
-        match self.mac.err() {
-            Some(e) => ExpandResult::new(
-                tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
-                ExpandError::other(format!("invalid macro definition: {e}")),
-            ),
-            None => self
-                .mac
-                .expand(
-                    &tt,
-                    |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
-                    new_meta_vars,
-                    loc.call_site,
-                )
-                .map_err(Into::into),
-        }
-    }
-
-    pub fn expand_unhygienic(
-        &self,
-        db: &dyn ExpandDatabase,
-        tt: tt::Subtree,
-        krate: CrateId,
-        call_site: Span,
-    ) -> ExpandResult<tt::Subtree> {
-        let toolchain = &db.crate_graph()[krate].toolchain;
-        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-                &base_db::Version {
-                    pre: base_db::Prerelease::EMPTY,
-                    build: base_db::BuildMetadata::EMPTY,
-                    major: version.major,
-                    minor: version.minor,
-                    patch: version.patch,
-                },
-            )
-        });
-        match self.mac.err() {
-            Some(e) => ExpandResult::new(
-                tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
-                ExpandError::other(format!("invalid macro definition: {e}")),
-            ),
-            None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
-        }
-    }
-}
-
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
     /// Old-style `macro_rules` or the new macros 2.0

@@ -141,6 +61,7 @@ pub trait ExpandDatabase: SourceDatabase {
     #[salsa::input]
     fn proc_macros(&self) -> Arc<ProcMacros>;

+    #[salsa::invoke(AstIdMap::ast_id_map)]
     fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;

     /// Main public API -- parses a hir file, not caring whether it's a real

@@ -156,8 +77,10 @@ pub trait ExpandDatabase: SourceDatabase {
         macro_file: MacroFileId,
     ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
     #[salsa::transparent]
+    #[salsa::invoke(SpanMap::new)]
     fn span_map(&self, file_id: HirFileId) -> SpanMap;

+    #[salsa::invoke(crate::span_map::real_span_map)]
     fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;

     /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the

@@ -173,6 +96,7 @@ pub trait ExpandDatabase: SourceDatabase {
     #[salsa::transparent]
     fn setup_syntax_context_root(&self) -> ();
     #[salsa::transparent]
+    #[salsa::invoke(crate::hygiene::dump_syntax_contexts)]
     fn dump_syntax_contexts(&self) -> String;

     /// Lowers syntactic macro call to a token tree representation. That's a firewall

@@ -184,8 +108,10 @@ pub trait ExpandDatabase: SourceDatabase {
     ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
     /// Fetches the expander for this macro.
     #[salsa::transparent]
+    #[salsa::invoke(TokenExpander::macro_expander)]
     fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
     /// Fetches (and compiles) the expander of this decl macro.
+    #[salsa::invoke(DeclarativeMacroExpander::expander)]
     fn decl_macro_expander(
         &self,
         def_crate: CrateId,

@@ -203,36 +129,6 @@ pub trait ExpandDatabase: SourceDatabase {
     ) -> ExpandResult<Box<[SyntaxError]>>;
 }

-#[inline]
-pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
-    match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
-        HirFileIdRepr::MacroFile(m) => {
-            SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
-        }
-    }
-}
-
-pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
-    use syntax::ast::HasModuleItem;
-    let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
-    let ast_id_map = db.ast_id_map(file_id.into());
-    let tree = db.parse(file_id).tree();
-    // FIXME: Descend into modules and other item containing items that are not annotated with attributes
-    // and allocate pairs for those as well. This gives us finer grained span anchors resulting in
-    // better incrementality
-    pairs.extend(
-        tree.items()
-            .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
-    );
-
-    Arc::new(RealSpanMap::from_file(
-        file_id,
-        pairs.into_boxed_slice(),
-        tree.syntax().text_range().end(),
-    ))
-}
-
 /// This expands the given macro call, but with different arguments. This is
 /// used for completion, where we want to see what 'would happen' if we insert a
 /// token. The `token_to_map` mapped down into the expansion, with the mapped

@@ -357,10 +253,6 @@ pub fn expand_speculative(
     Some((node.syntax_node(), token))
 }

-fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
-    Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
-}
-
 fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
     match file_id.repr() {
         HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),

@@ -412,7 +304,10 @@ fn parse_macro_expansion_error(
         .map(|it| it.0.errors().to_vec().into_boxed_slice())
 }

-fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
+pub(crate) fn parse_with_map(
+    db: &dyn ExpandDatabase,
+    file_id: HirFileId,
+) -> (Parse<SyntaxNode>, SpanMap) {
     match file_id.repr() {
         HirFileIdRepr::FileId(file_id) => {
             (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))

@@ -581,100 +476,18 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
         .unwrap_or_default()
 }

-fn decl_macro_expander(
-    db: &dyn ExpandDatabase,
-    def_crate: CrateId,
-    id: AstId<ast::Macro>,
-) -> Arc<DeclarativeMacroExpander> {
-    let crate_data = &db.crate_graph()[def_crate];
-    let is_2021 = crate_data.edition >= Edition::Edition2021;
-    let (root, map) = parse_with_map(db, id.file_id);
-    let root = root.syntax_node();
-
-    let transparency = |node| {
-        // ... would be nice to have the item tree here
-        let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
-        match &*attrs
-            .iter()
-            .find(|it| {
-                it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
-            })?
-            .token_tree_value()?
-            .token_trees
-        {
-            [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
-                "transparent" => Some(Transparency::Transparent),
-                "semitransparent" => Some(Transparency::SemiTransparent),
-                "opaque" => Some(Transparency::Opaque),
-                _ => None,
-            },
-            _ => None,
-        }
-    };
-    let toolchain = crate_data.toolchain.as_ref();
-    let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-        REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-            &base_db::Version {
-                pre: base_db::Prerelease::EMPTY,
-                build: base_db::BuildMetadata::EMPTY,
-                major: version.major,
-                minor: version.minor,
-                patch: version.patch,
-            },
-        )
-    });
-
-    let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
-        ast::Macro::MacroRules(macro_rules) => (
-            match macro_rules.token_tree() {
-                Some(arg) => {
-                    let tt = mbe::syntax_node_to_token_tree(
-                        arg.syntax(),
-                        map.as_ref(),
-                        map.span_for_range(macro_rules.macro_rules_token().unwrap().text_range()),
-                    );
-
-                    mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars)
-                }
-                None => mbe::DeclarativeMacro::from_err(
-                    mbe::ParseError::Expected("expected a token tree".into()),
-                    is_2021,
-                ),
-            },
-            transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
-        ),
-        ast::Macro::MacroDef(macro_def) => (
-            match macro_def.body() {
-                Some(arg) => {
-                    let tt = mbe::syntax_node_to_token_tree(
-                        arg.syntax(),
-                        map.as_ref(),
-                        map.span_for_range(macro_def.macro_token().unwrap().text_range()),
-                    );
-
-                    mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars)
-                }
-                None => mbe::DeclarativeMacro::from_err(
-                    mbe::ParseError::Expected("expected a token tree".into()),
-                    is_2021,
-                ),
-            },
-            transparency(&macro_def).unwrap_or(Transparency::Opaque),
-        ),
-    };
-    Arc::new(DeclarativeMacroExpander { mac, transparency })
-}
-
-fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
-    match id.kind {
-        MacroDefKind::Declarative(ast_id) => {
-            TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
-        }
-        MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
-        MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
-        MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
-        MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
-        MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
-    }
-}
+impl TokenExpander {
+    fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
+        match id.kind {
+            MacroDefKind::Declarative(ast_id) => {
+                TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
+            }
+            MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
+            MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
+            MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
+            MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
+            MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
+        }
+    }
+}

@@ -862,40 +675,3 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
 fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
     db.intern_syntax_context(SyntaxContextData::root());
 }
-
-fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
-    let mut s = String::from("Expansions:");
-    let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
-    entries.sort_by_key(|e| e.key);
-    for e in entries {
-        let id = e.key;
-        let expn_data = e.value.as_ref().unwrap();
-        s.push_str(&format!(
-            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
-            id,
-            expn_data.kind.file_id(),
-            expn_data.call_site,
-            SyntaxContextId::ROOT, // FIXME expn_data.def_site,
-            expn_data.kind.descr(),
-        ));
-    }
-
-    s.push_str("\n\nSyntaxContexts:\n");
-    let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
-    entries.sort_by_key(|e| e.key);
-    for e in entries {
-        struct SyntaxContextDebug<'a>(
-            &'a dyn ExpandDatabase,
-            SyntaxContextId,
-            &'a SyntaxContextData,
-        );
-
-        impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
-            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-                self.2.fancy_debug(self.1, self.0, f)
-            }
-        }
-        stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
-    }
-    s
-}

crates/hir-expand/src/declarative.rs (new file, 177 lines)

@@ -0,0 +1,177 @@
+//! Compiled declarative macro expanders (`macro_rules!` and `macro`)
+use std::sync::OnceLock;
+
+use base_db::{CrateId, Edition, VersionReq};
+use span::{MacroCallId, Span};
+use syntax::{ast, AstNode};
+use triomphe::Arc;
+
+use crate::{
+    attrs::RawAttrs,
+    db::ExpandDatabase,
+    hygiene::{apply_mark, Transparency},
+    tt, AstId, ExpandError, ExpandResult,
+};
+
+/// Old-style `macro_rules` or the new macros 2.0
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct DeclarativeMacroExpander {
+    pub mac: mbe::DeclarativeMacro<span::Span>,
+    pub transparency: Transparency,
+}
+
+// FIXME: Remove this once we drop support for 1.76
+static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();
+
+impl DeclarativeMacroExpander {
+    pub fn expand(
+        &self,
+        db: &dyn ExpandDatabase,
+        tt: tt::Subtree,
+        call_id: MacroCallId,
+    ) -> ExpandResult<tt::Subtree> {
+        let loc = db.lookup_intern_macro_call(call_id);
+        let toolchain = &db.crate_graph()[loc.def.krate].toolchain;
+        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
+            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
+                &base_db::Version {
+                    pre: base_db::Prerelease::EMPTY,
+                    build: base_db::BuildMetadata::EMPTY,
+                    major: version.major,
+                    minor: version.minor,
+                    patch: version.patch,
+                },
+            )
+        });
+        match self.mac.err() {
+            Some(e) => ExpandResult::new(
+                tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
+                ExpandError::other(format!("invalid macro definition: {e}")),
+            ),
+            None => self
+                .mac
+                .expand(
+                    &tt,
+                    |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
+                    new_meta_vars,
+                    loc.call_site,
+                )
+                .map_err(Into::into),
+        }
+    }
+
+    pub fn expand_unhygienic(
+        &self,
+        db: &dyn ExpandDatabase,
+        tt: tt::Subtree,
+        krate: CrateId,
+        call_site: Span,
+    ) -> ExpandResult<tt::Subtree> {
+        let toolchain = &db.crate_graph()[krate].toolchain;
+        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
+            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
+                &base_db::Version {
+                    pre: base_db::Prerelease::EMPTY,
+                    build: base_db::BuildMetadata::EMPTY,
+                    major: version.major,
+                    minor: version.minor,
+                    patch: version.patch,
+                },
+            )
+        });
+        match self.mac.err() {
+            Some(e) => ExpandResult::new(
+                tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+                ExpandError::other(format!("invalid macro definition: {e}")),
+            ),
+            None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
+        }
+    }
+
+    pub(crate) fn expander(
+        db: &dyn ExpandDatabase,
+        def_crate: CrateId,
+        id: AstId<ast::Macro>,
+    ) -> Arc<DeclarativeMacroExpander> {
+        let crate_data = &db.crate_graph()[def_crate];
+        let is_2021 = crate_data.edition >= Edition::Edition2021;
+        let (root, map) = crate::db::parse_with_map(db, id.file_id);
+        let root = root.syntax_node();
+
+        let transparency = |node| {
+            // ... would be nice to have the item tree here
+            let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
+            match &*attrs
+                .iter()
+                .find(|it| {
+                    it.path.as_ident().and_then(|it| it.as_str())
+                        == Some("rustc_macro_transparency")
+                })?
+                .token_tree_value()?
+                .token_trees
+            {
+                [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
+                    "transparent" => Some(Transparency::Transparent),
+                    "semitransparent" => Some(Transparency::SemiTransparent),
+                    "opaque" => Some(Transparency::Opaque),
+                    _ => None,
+                },
+                _ => None,
+            }
+        };
+        let toolchain = crate_data.toolchain.as_ref();
+        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
+            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
+                &base_db::Version {
+                    pre: base_db::Prerelease::EMPTY,
+                    build: base_db::BuildMetadata::EMPTY,
+                    major: version.major,
+                    minor: version.minor,
+                    patch: version.patch,
+                },
+            )
+        });
+
+        let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
+            ast::Macro::MacroRules(macro_rules) => (
+                match macro_rules.token_tree() {
+                    Some(arg) => {
+                        let tt = mbe::syntax_node_to_token_tree(
+                            arg.syntax(),
+                            map.as_ref(),
+                            map.span_for_range(
+                                macro_rules.macro_rules_token().unwrap().text_range(),
+                            ),
+                        );
+
+                        mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars)
+                    }
+                    None => mbe::DeclarativeMacro::from_err(
+                        mbe::ParseError::Expected("expected a token tree".into()),
+                        is_2021,
+                    ),
+                },
+                transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
+            ),
+            ast::Macro::MacroDef(macro_def) => (
+                match macro_def.body() {
+                    Some(arg) => {
+                        let tt = mbe::syntax_node_to_token_tree(
+                            arg.syntax(),
+                            map.as_ref(),
+                            map.span_for_range(macro_def.macro_token().unwrap().text_range()),
+                        );
+
+                        mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars)
+                    }
+                    None => mbe::DeclarativeMacro::from_err(
+                        mbe::ParseError::Expected("expected a token tree".into()),
+                        is_2021,
+                    ),
+                },
+                transparency(&macro_def).unwrap_or(Transparency::Opaque),
+            ),
+        };
+        Arc::new(DeclarativeMacroExpander { mac, transparency })
+    }
+}

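Both expansion paths in the new module gate the `new_meta_vars` behavior on the workspace toolchain being at least 1.76, caching the parsed requirement in a `OnceLock`. The check itself is plain semver matching; here is a minimal sketch using the `semver` crate directly (rust-analyzer routes these types through `base_db`, which is assumed here to re-export them):

```rust
use semver::{Version, VersionReq};
use std::sync::OnceLock;

static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();

// Mirrors the gating above: enable new metavariable expressions only on >= 1.76.
fn supports_new_meta_vars(toolchain: Option<&Version>) -> bool {
    let req = REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap());
    // Unknown toolchain: stay conservative and keep the old behavior.
    toolchain.map_or(false, |v| req.matches(v))
}

fn main() {
    assert!(supports_new_meta_vars(Some(&Version::new(1, 76, 0))));
    assert!(!supports_new_meta_vars(Some(&Version::new(1, 75, 2))));
    assert!(!supports_new_meta_vars(None));
}
```
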
@@ -245,3 +245,43 @@ pub fn marks_rev(
     })
     .map(|ctx| ctx.outer_mark(db))
 }
+
+pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
+    use crate::db::{InternMacroCallLookupQuery, InternSyntaxContextLookupQuery};
+    use base_db::salsa::debug::DebugQueryTable;
+
+    let mut s = String::from("Expansions:");
+    let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
+    entries.sort_by_key(|e| e.key);
+    for e in entries {
+        let id = e.key;
+        let expn_data = e.value.as_ref().unwrap();
+        s.push_str(&format!(
+            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
+            id,
+            expn_data.kind.file_id(),
+            expn_data.call_site,
+            SyntaxContextId::ROOT, // FIXME expn_data.def_site,
+            expn_data.kind.descr(),
+        ));
+    }
+
+    s.push_str("\n\nSyntaxContexts:\n");
+    let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
+    entries.sort_by_key(|e| e.key);
+    for e in entries {
+        struct SyntaxContextDebug<'a>(
+            &'a dyn ExpandDatabase,
+            SyntaxContextId,
+            &'a SyntaxContextData,
+        );
+
+        impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
+            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+                self.2.fancy_debug(self.1, self.0, f)
+            }
+        }
+        stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
+    }
+    s
+}

@@ -11,16 +11,18 @@ pub mod attrs;
 pub mod builtin_attr_macro;
 pub mod builtin_derive_macro;
 pub mod builtin_fn_macro;
+pub mod change;
 pub mod db;
+pub mod declarative;
 pub mod eager;
 pub mod files;
-pub mod change;
 pub mod hygiene;
 pub mod mod_path;
 pub mod name;
 pub mod proc_macro;
 pub mod quote;
 pub mod span_map;

 mod fixup;

+use attrs::collect_attrs;

@@ -167,7 +169,8 @@ pub struct MacroCallLoc {
     pub krate: CrateId,
     /// Some if this is a macro call for an eager macro. Note that this is `None`
     /// for the eager input macro file.
-    // FIXME: This seems bad to save in an interned structure
+    // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing
+    // leakage problems here
     eager: Option<Arc<EagerCallInfo>>,
     pub kind: MacroCallKind,
     pub call_site: Span,

@@ -220,7 +223,7 @@ pub enum MacroCallKind {
     },
     Attr {
         ast_id: AstId<ast::Item>,
-        // FIXME: This is being interned, subtrees can very quickly differ just slightly causing
+        // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing
         // leakage problems here
         attr_args: Option<Arc<tt::Subtree>>,
         /// Syntactical index of the invoking `#[attribute]`.

@@ -10,6 +10,7 @@ use crate::{
     hygiene::{marks_rev, SyntaxContextExt, Transparency},
     name::{known, AsName, Name},
     span_map::SpanMapRef,
+    tt,
 };
 use base_db::CrateId;
 use smallvec::SmallVec;

@@ -39,7 +40,7 @@ pub enum PathKind {
     Crate,
     /// Absolute path (::foo)
     Abs,
-    // FIXME: Remove this
+    // FIXME: Can we remove this somehow?
     /// `$crate` from macro expansion
     DollarCrate(CrateId),
 }

@@ -50,11 +51,16 @@ impl ModPath {
         path: ast::Path,
         span_map: SpanMapRef<'_>,
     ) -> Option<ModPath> {
-        convert_path(db, None, path, span_map)
+        convert_path(db, path, span_map)
+    }
+
+    pub fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
+        convert_path_tt(db, tt)
     }

     pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
-        let segments = segments.into_iter().collect();
+        let mut segments: SmallVec<_> = segments.into_iter().collect();
+        segments.shrink_to_fit();
         ModPath { kind, segments }
     }

@@ -193,22 +199,15 @@ fn display_fmt_path(

 fn convert_path(
     db: &dyn ExpandDatabase,
-    prefix: Option<ModPath>,
     path: ast::Path,
     span_map: SpanMapRef<'_>,
 ) -> Option<ModPath> {
-    let prefix = match path.qualifier() {
-        Some(qual) => Some(convert_path(db, prefix, qual, span_map)?),
-        None => prefix,
-    };
+    let mut segments = path.segments();

-    let segment = path.segment()?;
+    let segment = &segments.next()?;
     let mut mod_path = match segment.kind()? {
         ast::PathSegmentKind::Name(name_ref) => {
             if name_ref.text() == "$crate" {
-                if prefix.is_some() {
-                    return None;
-                }
                 ModPath::from_kind(
                     resolve_crate_root(
                         db,

@@ -218,41 +217,36 @@ fn convert_path(
                 .unwrap_or(PathKind::Crate),
                 )
             } else {
-                let mut res = prefix.unwrap_or_else(|| {
-                    ModPath::from_kind(
-                        segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
-                    )
-                });
+                let mut res = ModPath::from_kind(
+                    segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
+                );
                 res.segments.push(name_ref.as_name());
                 res
             }
         }
         ast::PathSegmentKind::SelfTypeKw => {
-            if prefix.is_some() {
-                return None;
-            }
             ModPath::from_segments(PathKind::Plain, Some(known::SELF_TYPE))
         }
-        ast::PathSegmentKind::CrateKw => {
-            if prefix.is_some() {
-                return None;
-            }
-            ModPath::from_segments(PathKind::Crate, iter::empty())
-        }
-        ast::PathSegmentKind::SelfKw => {
-            if prefix.is_some() {
-                return None;
-            }
-            ModPath::from_segments(PathKind::Super(0), iter::empty())
-        }
+        ast::PathSegmentKind::CrateKw => ModPath::from_segments(PathKind::Crate, iter::empty()),
+        ast::PathSegmentKind::SelfKw => ModPath::from_segments(PathKind::Super(0), iter::empty()),
         ast::PathSegmentKind::SuperKw => {
-            let nested_super_count = match prefix.map(|p| p.kind) {
-                Some(PathKind::Super(n)) => n,
-                Some(_) => return None,
-                None => 0,
-            };
+            let mut deg = 1;
+            let mut next_segment = None;
+            while let Some(segment) = segments.next() {
+                match segment.kind()? {
+                    ast::PathSegmentKind::SuperKw => deg += 1,
+                    ast::PathSegmentKind::Name(name) => {
+                        next_segment = Some(name.as_name());
+                        break;
+                    }
+                    ast::PathSegmentKind::Type { .. }
+                    | ast::PathSegmentKind::SelfTypeKw
+                    | ast::PathSegmentKind::SelfKw
+                    | ast::PathSegmentKind::CrateKw => return None,
+                }
+            }

-            ModPath::from_segments(PathKind::Super(nested_super_count + 1), iter::empty())
+            ModPath::from_segments(PathKind::Super(deg), next_segment)
         }
         ast::PathSegmentKind::Type { .. } => {
             // not allowed in imports

@@ -260,6 +254,14 @@ fn convert_path(
         }
     };

+    for segment in segments {
+        let name = match segment.kind()? {
+            ast::PathSegmentKind::Name(name) => name.as_name(),
+            _ => return None,
+        };
+        mod_path.segments.push(name);
+    }
+
     // handle local_inner_macros :
     // Basically, even in rustc it is quite hacky:
     // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456

@ -281,6 +283,46 @@ fn convert_path(
|
|||
Some(mod_path)
|
||||
}
|
||||
|
||||
fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
|
||||
let mut leafs = tt.iter().filter_map(|tt| match tt {
|
||||
tt::TokenTree::Leaf(leaf) => Some(leaf),
|
||||
tt::TokenTree::Subtree(_) => None,
|
||||
});
|
||||
let mut segments = smallvec::smallvec![];
|
||||
let kind = match leafs.next()? {
|
||||
tt::Leaf::Punct(tt::Punct { char: ':', .. }) => match leafs.next()? {
|
||||
tt::Leaf::Punct(tt::Punct { char: ':', .. }) => PathKind::Abs,
|
||||
_ => return None,
|
||||
},
|
||||
tt::Leaf::Ident(tt::Ident { text, span }) if text == "$crate" => {
|
||||
resolve_crate_root(db, span.ctx).map(PathKind::DollarCrate).unwrap_or(PathKind::Crate)
|
||||
}
|
||||
tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::Super(0),
|
||||
tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => {
|
||||
let mut deg = 1;
|
||||
while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leafs.next() {
|
||||
if text != "super" {
|
||||
segments.push(Name::new_text_dont_use(text.clone()));
|
||||
break;
|
||||
}
|
||||
deg += 1;
|
||||
}
|
||||
PathKind::Super(deg)
|
||||
}
|
||||
tt::Leaf::Ident(tt::Ident { text, .. }) if text == "crate" => PathKind::Crate,
|
||||
tt::Leaf::Ident(ident) => {
|
||||
segments.push(Name::new_text_dont_use(ident.text.clone()));
|
||||
PathKind::Plain
|
||||
}
|
||||
_ => return None,
|
||||
};
|
||||
segments.extend(leafs.filter_map(|leaf| match leaf {
|
||||
::tt::Leaf::Ident(ident) => Some(Name::new_text_dont_use(ident.text.clone())),
|
||||
_ => None,
|
||||
}));
|
||||
Some(ModPath { kind, segments })
|
||||
}
|
||||
|
||||
pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
|
||||
// When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
|
||||
// we don't want to pretend that the `macro_rules!` definition is in the `macro`
|
||||
|
|
|
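The `convert_path_tt` function added above classifies the leading tokens of a token-stream path to pick a `PathKind` before collecting the remaining identifiers as segments. A minimal standalone sketch of that classification, using plain strings and a hypothetical `PathKind` enum instead of rust-analyzer's token and name types:

```rust
// Hypothetical stand-in for hir-expand's PathKind; only the shape matters here.
#[derive(Debug, PartialEq)]
enum PathKind {
    Plain,
    Crate,
    DollarCrate,
    Super(u8),
}

fn classify(segments: &[&str]) -> PathKind {
    match segments {
        ["$crate", ..] => PathKind::DollarCrate,
        // `self` is represented as zero levels of `super`, as in the diff above.
        ["self", ..] => PathKind::Super(0),
        ["crate", ..] => PathKind::Crate,
        ["super", ..] => {
            // Count leading `super` repetitions, exactly like the `deg` loop above.
            let deg = segments.iter().take_while(|s| **s == "super").count();
            PathKind::Super(deg as u8)
        }
        _ => PathKind::Plain,
    }
}

fn main() {
    assert_eq!(classify(&["super", "super", "foo"]), PathKind::Super(2));
    assert_eq!(classify(&["crate", "foo"]), PathKind::Crate);
    assert_eq!(classify(&["foo", "bar"]), PathKind::Plain);
}
```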
crates/hir-expand/src/span_map.rs
@@ -1,10 +1,12 @@
 //! Span maps for real files and macro expansions.
-use span::Span;
-use syntax::TextRange;
+use span::{FileId, HirFileId, HirFileIdRepr, Span};
+use syntax::{AstNode, TextRange};
 use triomphe::Arc;
 
 pub use span::RealSpanMap;
 
+use crate::db::ExpandDatabase;
+
 pub type ExpansionSpanMap = span::SpanMap<Span>;
 
 /// Spanmap for a macro file or a real file
@@ -34,7 +36,6 @@ impl mbe::SpanMapper<Span> for SpanMapRef<'_> {
         self.span_for_range(range)
     }
 }
-
 impl SpanMap {
     pub fn span_for_range(&self, range: TextRange) -> Span {
         match self {
@@ -53,6 +54,16 @@ impl SpanMap {
             Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
         }
    }
+
+    #[inline]
+    pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
+        match file_id.repr() {
+            HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
+            HirFileIdRepr::MacroFile(m) => {
+                SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
+            }
+        }
+    }
 }
 
 impl SpanMapRef<'_> {
@@ -63,3 +74,23 @@ impl SpanMapRef<'_> {
         }
     }
 }
+
+pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
+    use syntax::ast::HasModuleItem;
+    let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
+    let ast_id_map = db.ast_id_map(file_id.into());
+    let tree = db.parse(file_id).tree();
+    // FIXME: Descend into modules and other item containing items that are not annotated with attributes
+    // and allocate pairs for those as well. This gives us finer grained span anchors resulting in
+    // better incrementality
+    pairs.extend(
+        tree.items()
+            .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
+    );
+
+    Arc::new(RealSpanMap::from_file(
+        file_id,
+        pairs.into_boxed_slice(),
+        tree.syntax().text_range().end(),
+    ))
+}
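`real_span_map` above records one `(text offset, erased AST id)` anchor pair per top-level item, so a span can later be resolved relative to the nearest preceding anchor. A simplified sketch of that lookup scheme, with a hypothetical `SpanMapSketch` standing in for `span::RealSpanMap`:

```rust
// Hypothetical simplified model: anchors are (start_offset, label) pairs sorted by
// offset, with the file root at offset 0, mirroring the `pairs` vec built above.
struct SpanMapSketch {
    pairs: Vec<(u32, &'static str)>,
}

impl SpanMapSketch {
    fn anchor_for(&self, offset: u32) -> &'static str {
        // partition_point counts the pairs starting at or before `offset`; because
        // the root pair sits at offset 0, the result is always at least 1.
        let idx = self.pairs.partition_point(|&(start, _)| start <= offset);
        self.pairs[idx - 1].1
    }
}

fn main() {
    let map = SpanMapSketch {
        pairs: vec![(0, "ROOT"), (120, "fn foo"), (300, "struct Bar")],
    };
    assert_eq!(map.anchor_for(10), "ROOT");
    assert_eq!(map.anchor_for(150), "fn foo");
    assert_eq!(map.anchor_for(300), "struct Bar");
}
```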
crates/hir-ty/Cargo.toml
@@ -36,6 +36,7 @@ indexmap.workspace = true
 
 ra-ap-rustc_abi.workspace = true
 ra-ap-rustc_index.workspace = true
+ra-ap-rustc_pattern_analysis.workspace = true
 
 
 # local deps
crates/hir-ty/src/diagnostics.rs
@@ -1,8 +1,8 @@
 //! Type inference-based diagnostics.
+mod decl_check;
 mod expr;
 mod match_check;
 mod unsafe_check;
-mod decl_check;
 
 pub use crate::diagnostics::{
     decl_check::{incorrect_case, CaseType, IncorrectCase},
crates/hir-ty/src/diagnostics/expr.rs
@@ -11,6 +11,7 @@ use hir_def::{ItemContainerId, Lookup};
 use hir_expand::name;
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
+use rustc_pattern_analysis::usefulness::{compute_match_usefulness, ValidityConstraint};
 use triomphe::Arc;
 use typed_arena::Arena;
 
@@ -18,8 +19,7 @@ use crate::{
     db::HirDatabase,
     diagnostics::match_check::{
         self,
-        deconstruct_pat::DeconstructedPat,
-        usefulness::{compute_match_usefulness, MatchCheckCtx},
+        pat_analysis::{self, DeconstructedPat, MatchCheckCtx, WitnessPat},
     },
     display::HirDisplay,
     InferenceResult, Ty, TyExt,
@@ -152,7 +152,14 @@ impl ExprValidator {
         }
 
         let pattern_arena = Arena::new();
-        let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db, &pattern_arena);
+        let ty_arena = Arena::new();
+        let cx = MatchCheckCtx::new(
+            self.owner.module(db.upcast()),
+            self.owner,
+            db,
+            &pattern_arena,
+            &ty_arena,
+        );
 
         let mut m_arms = Vec::with_capacity(arms.len());
         let mut has_lowering_errors = false;
@@ -178,9 +185,10 @@ impl ExprValidator {
             // If we had a NotUsefulMatchArm diagnostic, we could
             // check the usefulness of each pattern as we added it
             // to the matrix here.
-            let m_arm = match_check::MatchArm {
+            let m_arm = pat_analysis::MatchArm {
                 pat: self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors),
                 has_guard: arm.guard.is_some(),
+                arm_data: (),
             };
             m_arms.push(m_arm);
             if !has_lowering_errors {
@@ -197,7 +205,15 @@ impl ExprValidator {
             return;
         }
 
-        let report = compute_match_usefulness(&cx, &m_arms, scrut_ty);
+        let report = match compute_match_usefulness(
+            rustc_pattern_analysis::MatchCtxt { tycx: &cx },
+            m_arms.as_slice(),
+            scrut_ty.clone(),
+            ValidityConstraint::ValidOnly,
+        ) {
+            Ok(report) => report,
+            Err(void) => match void {},
+        };
 
         // FIXME Report unreachable arms
         // https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200
@@ -213,7 +229,7 @@ impl ExprValidator {
 
     fn lower_pattern<'p>(
         &self,
-        cx: &MatchCheckCtx<'_, 'p>,
+        cx: &MatchCheckCtx<'p>,
         pat: PatId,
         db: &dyn HirDatabase,
         body: &Body,
@@ -221,7 +237,7 @@ impl ExprValidator {
     ) -> &'p DeconstructedPat<'p> {
         let mut patcx = match_check::PatCtxt::new(db, &self.infer, body);
         let pattern = patcx.lower_pattern(pat);
-        let pattern = cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, &pattern));
+        let pattern = cx.pattern_arena.alloc(cx.lower_pat(&pattern));
         if !patcx.errors.is_empty() {
             *have_errors = true;
         }
@@ -364,16 +380,16 @@ fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResul
 }
 
 fn missing_match_arms<'p>(
-    cx: &MatchCheckCtx<'_, 'p>,
+    cx: &MatchCheckCtx<'p>,
     scrut_ty: &Ty,
-    witnesses: Vec<DeconstructedPat<'p>>,
+    witnesses: Vec<WitnessPat<'p>>,
     arms: &[MatchArm],
 ) -> String {
-    struct DisplayWitness<'a, 'p>(&'a DeconstructedPat<'p>, &'a MatchCheckCtx<'a, 'p>);
+    struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>);
     impl fmt::Display for DisplayWitness<'_, '_> {
         fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
             let DisplayWitness(witness, cx) = *self;
-            let pat = witness.to_pat(cx);
+            let pat = cx.hoist_witness_pat(witness);
             write!(f, "{}", pat.display(cx.db))
         }
     }
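The new `compute_match_usefulness` call above handles its `Result` with `Err(void) => match void {}`: the error type (`Void`, defined in `pat_analysis.rs` below) has no variants, so the empty match proves at compile time that the error path is unreachable, with no panic or `unwrap` needed. A minimal self-contained sketch of the idiom:

```rust
// An enum with no variants is uninhabited: no value of this type can ever exist.
enum Void {}

fn always_ok() -> Result<u32, Void> {
    Ok(42)
}

fn main() {
    let value = match always_ok() {
        Ok(v) => v,
        // The empty match on an uninhabited value type-checks against any type
        // and compiles away entirely; this arm is unreachable by construction.
        Err(void) => match void {},
    };
    println!("{value}");
}
```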
crates/hir-ty/src/diagnostics/match_check.rs
@@ -7,8 +7,7 @@
 
 mod pat_util;
 
-pub(crate) mod deconstruct_pat;
-pub(crate) mod usefulness;
+pub(crate) mod pat_analysis;
 
 use chalk_ir::Mutability;
 use hir_def::{
@@ -27,8 +26,6 @@ use crate::{
 
 use self::pat_util::EnumerateAndAdjustIterator;
 
-pub(crate) use self::usefulness::MatchArm;
-
 #[derive(Clone, Debug)]
 pub(crate) enum PatternError {
     Unimplemented,
@@ -413,98 +410,3 @@ where
         (self.0)(f)
     }
 }
-
-pub(crate) trait PatternFoldable: Sized {
-    fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        self.super_fold_with(folder)
-    }
-
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self;
-}
-
-pub(crate) trait PatternFolder: Sized {
-    fn fold_pattern(&mut self, pattern: &Pat) -> Pat {
-        pattern.super_fold_with(self)
-    }
-
-    fn fold_pattern_kind(&mut self, kind: &PatKind) -> PatKind {
-        kind.super_fold_with(self)
-    }
-}
-
-impl<T: PatternFoldable> PatternFoldable for Box<T> {
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        let content: T = (**self).fold_with(folder);
-        Box::new(content)
-    }
-}
-
-impl<T: PatternFoldable> PatternFoldable for Vec<T> {
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        self.iter().map(|t| t.fold_with(folder)).collect()
-    }
-}
-
-impl<T: PatternFoldable> PatternFoldable for Option<T> {
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        self.as_ref().map(|t| t.fold_with(folder))
-    }
-}
-
-macro_rules! clone_impls {
-    ($($ty:ty),+) => {
-        $(
-            impl PatternFoldable for $ty {
-                fn super_fold_with<F: PatternFolder>(&self, _: &mut F) -> Self {
-                    Clone::clone(self)
-                }
-            }
-        )+
-    }
-}
-
-clone_impls! { LocalFieldId, Ty, Substitution, EnumVariantId }
-
-impl PatternFoldable for FieldPat {
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        FieldPat { field: self.field.fold_with(folder), pattern: self.pattern.fold_with(folder) }
-    }
-}
-
-impl PatternFoldable for Pat {
-    fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        folder.fold_pattern(self)
-    }
-
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        Pat { ty: self.ty.fold_with(folder), kind: self.kind.fold_with(folder) }
-    }
-}
-
-impl PatternFoldable for PatKind {
-    fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        folder.fold_pattern_kind(self)
-    }
-
-    fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
-        match self {
-            PatKind::Wild => PatKind::Wild,
-            PatKind::Binding { name, subpattern } => {
-                PatKind::Binding { name: name.clone(), subpattern: subpattern.fold_with(folder) }
-            }
-            PatKind::Variant { substs, enum_variant, subpatterns } => PatKind::Variant {
-                substs: substs.fold_with(folder),
-                enum_variant: enum_variant.fold_with(folder),
-                subpatterns: subpatterns.fold_with(folder),
-            },
-            PatKind::Leaf { subpatterns } => {
-                PatKind::Leaf { subpatterns: subpatterns.fold_with(folder) }
-            }
-            PatKind::Deref { subpattern } => {
-                PatKind::Deref { subpattern: subpattern.fold_with(folder) }
-            }
-            &PatKind::LiteralBool { value } => PatKind::LiteralBool { value },
-            PatKind::Or { pats } => PatKind::Or { pats: pats.fold_with(folder) },
-        }
-    }
-}
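The deleted `PatternFoldable`/`PatternFolder` pair is a standard fold-visitor: the folder supplies per-node hooks while `super_fold_with` performs the structural recursion, so an implementor overrides only the cases it cares about. A tiny self-contained sketch of the same shape on a toy tree type (not rust-analyzer's `Pat`):

```rust
#[derive(Clone, Debug, PartialEq)]
enum Tree {
    Leaf(i32),
    Node(Box<Tree>, Box<Tree>),
}

trait Folder: Sized {
    // Per-node hook; the default just recurses structurally.
    fn fold(&mut self, t: &Tree) -> Tree {
        t.super_fold(self)
    }
}

impl Tree {
    // Structural recursion: rebuild the node, folding each child via the hook.
    fn super_fold<F: Folder>(&self, f: &mut F) -> Tree {
        match self {
            Tree::Leaf(n) => Tree::Leaf(*n),
            Tree::Node(l, r) => Tree::Node(Box::new(f.fold(l)), Box::new(f.fold(r))),
        }
    }
}

// A folder that overrides only the leaf case and defers everything else.
struct Negate;
impl Folder for Negate {
    fn fold(&mut self, t: &Tree) -> Tree {
        match t {
            Tree::Leaf(n) => Tree::Leaf(-n),
            other => other.super_fold(self),
        }
    }
}

fn main() {
    let t = Tree::Node(Box::new(Tree::Leaf(1)), Box::new(Tree::Leaf(2)));
    let mut folder = Negate;
    let folded = folder.fold(&t);
    assert_eq!(folded, Tree::Node(Box::new(Tree::Leaf(-1)), Box::new(Tree::Leaf(-2))));
}
```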
(File diff suppressed because it is too large.)

crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs (new file, 475 lines)
@@ -0,0 +1,475 @@
//! Interface with `rustc_pattern_analysis`.

use std::fmt;

use hir_def::{DefWithBodyId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
use rustc_hash::FxHashMap;
use rustc_pattern_analysis::{
    constructor::{Constructor, ConstructorSet, VariantVisibility},
    index::IdxContainer,
    Captures, TypeCx,
};
use smallvec::SmallVec;
use stdx::never;
use typed_arena::Arena;

use crate::{
    db::HirDatabase,
    infer::normalize,
    inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from},
    AdtId, Interner, Scalar, Ty, TyExt, TyKind,
};

use super::{is_box, FieldPat, Pat, PatKind};

use Constructor::*;

// Re-export r-a-specific versions of all these types.
pub(crate) type DeconstructedPat<'p> =
    rustc_pattern_analysis::pat::DeconstructedPat<'p, MatchCheckCtx<'p>>;
pub(crate) type MatchArm<'p> = rustc_pattern_analysis::MatchArm<'p, MatchCheckCtx<'p>>;
pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat<MatchCheckCtx<'p>>;

/// [Constructor] uses this in unimplemented variants.
/// It allows porting match expressions from upstream algorithm without losing semantics.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum Void {}

#[derive(Clone)]
pub(crate) struct MatchCheckCtx<'p> {
    module: ModuleId,
    body: DefWithBodyId,
    pub(crate) db: &'p dyn HirDatabase,
    pub(crate) pattern_arena: &'p Arena<DeconstructedPat<'p>>,
    ty_arena: &'p Arena<Ty>,
    exhaustive_patterns: bool,
}

impl<'p> MatchCheckCtx<'p> {
    pub(crate) fn new(
        module: ModuleId,
        body: DefWithBodyId,
        db: &'p dyn HirDatabase,
        pattern_arena: &'p Arena<DeconstructedPat<'p>>,
        ty_arena: &'p Arena<Ty>,
    ) -> Self {
        let def_map = db.crate_def_map(module.krate());
        let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns");
        Self { module, body, db, pattern_arena, exhaustive_patterns, ty_arena }
    }

    fn is_uninhabited(&self, ty: &Ty) -> bool {
        is_ty_uninhabited_from(ty, self.module, self.db)
    }

    /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
    fn is_foreign_non_exhaustive_enum(&self, ty: &Ty) -> bool {
        match ty.as_adt() {
            Some((adt @ hir_def::AdtId::EnumId(_), _)) => {
                let has_non_exhaustive_attr =
                    self.db.attrs(adt.into()).by_key("non_exhaustive").exists();
                let is_local = adt.module(self.db.upcast()).krate() == self.module.krate();
                has_non_exhaustive_attr && !is_local
            }
            _ => false,
        }
    }

    fn variant_id_for_adt(&self, ctor: &Constructor<Self>, adt: hir_def::AdtId) -> VariantId {
        match ctor {
            &Variant(id) => id.into(),
            Struct | UnionField => {
                assert!(!matches!(adt, hir_def::AdtId::EnumId(_)));
                match adt {
                    hir_def::AdtId::EnumId(_) => unreachable!(),
                    hir_def::AdtId::StructId(id) => id.into(),
                    hir_def::AdtId::UnionId(id) => id.into(),
                }
            }
            _ => panic!("bad constructor {self:?} for adt {adt:?}"),
        }
    }

    // In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
    // uninhabited fields in order not to reveal the uninhabitedness of the whole variant.
    // This lists the fields we keep along with their types.
    fn list_variant_nonhidden_fields<'a>(
        &'a self,
        ty: &'a Ty,
        variant: VariantId,
    ) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'p> {
        let cx = self;
        let (adt, substs) = ty.as_adt().unwrap();

        let adt_is_local = variant.module(cx.db.upcast()).krate() == cx.module.krate();

        // Whether we must not match the fields of this variant exhaustively.
        let is_non_exhaustive =
            cx.db.attrs(variant.into()).by_key("non_exhaustive").exists() && !adt_is_local;

        let visibility = cx.db.field_visibilities(variant);
        let field_ty = cx.db.field_types(variant);
        let fields_len = variant.variant_data(cx.db.upcast()).fields().len() as u32;

        (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).filter_map(move |fid| {
            let ty = field_ty[fid].clone().substitute(Interner, substs);
            let ty = normalize(cx.db, cx.db.trait_environment_for_body(cx.body), ty);
            let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
                || visibility[fid].is_visible_from(cx.db.upcast(), cx.module);
            let is_uninhabited = cx.is_uninhabited(&ty);

            if is_uninhabited && (!is_visible || is_non_exhaustive) {
                None
            } else {
                Some((fid, ty))
            }
        })
    }

    pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> {
        let singleton = |pat| std::slice::from_ref(self.pattern_arena.alloc(pat));
        let ctor;
        let fields: &[_];

        match pat.kind.as_ref() {
            PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat),
            PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
                ctor = Wildcard;
                fields = &[];
            }
            PatKind::Deref { subpattern } => {
                ctor = match pat.ty.kind(Interner) {
                    // This is a box pattern.
                    TyKind::Adt(adt, _) if is_box(self.db, adt.0) => Struct,
                    TyKind::Ref(..) => Ref,
                    _ => {
                        never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
                        Wildcard
                    }
                };
                fields = singleton(self.lower_pat(subpattern));
            }
            PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
                match pat.ty.kind(Interner) {
                    TyKind::Tuple(_, substs) => {
                        ctor = Struct;
                        let mut wilds: SmallVec<[_; 2]> = substs
                            .iter(Interner)
                            .map(|arg| arg.assert_ty_ref(Interner).clone())
                            .map(DeconstructedPat::wildcard)
                            .collect();
                        for pat in subpatterns {
                            let idx: u32 = pat.field.into_raw().into();
                            wilds[idx as usize] = self.lower_pat(&pat.pattern);
                        }
                        fields = self.pattern_arena.alloc_extend(wilds)
                    }
                    TyKind::Adt(adt, substs) if is_box(self.db, adt.0) => {
                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
                        // patterns. If we're here we can assume this is a box pattern.
                        // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
                        // _)` or a box pattern. As a hack to avoid an ICE with the former, we
                        // ignore other fields than the first one. This will trigger an error later
                        // anyway.
                        // See https://github.com/rust-lang/rust/issues/82772 ,
                        // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
                        // The problem is that we can't know from the type whether we'll match
                        // normally or through box-patterns. We'll have to figure out a proper
                        // solution when we introduce generalized deref patterns. Also need to
                        // prevent mixing of those two options.
                        let pat =
                            subpatterns.iter().find(|pat| pat.field.into_raw() == 0u32.into());
                        let field = if let Some(pat) = pat {
                            self.lower_pat(&pat.pattern)
                        } else {
                            let ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
                            DeconstructedPat::wildcard(ty)
                        };
                        ctor = Struct;
                        fields = singleton(field);
                    }
                    &TyKind::Adt(adt, _) => {
                        ctor = match pat.kind.as_ref() {
                            PatKind::Leaf { .. } if matches!(adt.0, hir_def::AdtId::UnionId(_)) => {
                                UnionField
                            }
                            PatKind::Leaf { .. } => Struct,
                            PatKind::Variant { enum_variant, .. } => Variant(*enum_variant),
                            _ => {
                                never!();
                                Wildcard
                            }
                        };
                        let variant = self.variant_id_for_adt(&ctor, adt.0);
                        let fields_len = variant.variant_data(self.db.upcast()).fields().len();
                        // For each field in the variant, we store the relevant index into `self.fields` if any.
                        let mut field_id_to_id: Vec<Option<usize>> = vec![None; fields_len];
                        let tys = self
                            .list_variant_nonhidden_fields(&pat.ty, variant)
                            .enumerate()
                            .map(|(i, (fid, ty))| {
                                let field_idx: u32 = fid.into_raw().into();
                                field_id_to_id[field_idx as usize] = Some(i);
                                ty
                            });
                        let mut wilds: SmallVec<[_; 2]> =
                            tys.map(DeconstructedPat::wildcard).collect();
                        for pat in subpatterns {
                            let field_idx: u32 = pat.field.into_raw().into();
                            if let Some(i) = field_id_to_id[field_idx as usize] {
                                wilds[i] = self.lower_pat(&pat.pattern);
                            }
                        }
                        fields = self.pattern_arena.alloc_extend(wilds);
                    }
                    _ => {
                        never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
                        ctor = Wildcard;
                        fields = &[];
                    }
                }
            }
            &PatKind::LiteralBool { value } => {
                ctor = Bool(value);
                fields = &[];
            }
            PatKind::Or { pats } => {
                ctor = Or;
                // Collect here because `Arena::alloc_extend` panics on reentrancy.
                let subpats: SmallVec<[_; 2]> =
                    pats.into_iter().map(|pat| self.lower_pat(pat)).collect();
                fields = self.pattern_arena.alloc_extend(subpats);
            }
        }
        DeconstructedPat::new(ctor, fields, pat.ty.clone(), ())
    }

    pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'p>) -> Pat {
        let mut subpatterns = pat.iter_fields().map(|p| self.hoist_witness_pat(p));
        let kind = match pat.ctor() {
            &Bool(value) => PatKind::LiteralBool { value },
            IntRange(_) => unimplemented!(),
            Struct | Variant(_) | UnionField => match pat.ty().kind(Interner) {
                TyKind::Tuple(..) => PatKind::Leaf {
                    subpatterns: subpatterns
                        .zip(0u32..)
                        .map(|(p, i)| FieldPat {
                            field: LocalFieldId::from_raw(i.into()),
                            pattern: p,
                        })
                        .collect(),
                },
                TyKind::Adt(adt, _) if is_box(self.db, adt.0) => {
                    // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
                    // of `std`). So this branch is only reachable when the feature is enabled and
                    // the pattern is a box pattern.
                    PatKind::Deref { subpattern: subpatterns.next().unwrap() }
                }
                TyKind::Adt(adt, substs) => {
                    let variant = self.variant_id_for_adt(pat.ctor(), adt.0);
                    let subpatterns = self
                        .list_variant_nonhidden_fields(pat.ty(), variant)
                        .zip(subpatterns)
                        .map(|((field, _ty), pattern)| FieldPat { field, pattern })
                        .collect();

                    if let VariantId::EnumVariantId(enum_variant) = variant {
                        PatKind::Variant { substs: substs.clone(), enum_variant, subpatterns }
                    } else {
                        PatKind::Leaf { subpatterns }
                    }
                }
                _ => {
                    never!("unexpected ctor for type {:?} {:?}", pat.ctor(), pat.ty());
                    PatKind::Wild
                }
            },
            // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
            // be careful to reconstruct the correct constant pattern here. However a string
            // literal pattern will never be reported as a non-exhaustiveness witness, so we
            // ignore this issue.
            Ref => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
            Slice(_) => unimplemented!(),
            &Str(void) => match void {},
            Wildcard | NonExhaustive | Hidden => PatKind::Wild,
            Missing | F32Range(..) | F64Range(..) | Opaque(..) | Or => {
                never!("can't convert to pattern: {:?}", pat.ctor());
                PatKind::Wild
            }
        };
        Pat { ty: pat.ty().clone(), kind: Box::new(kind) }
    }
}

impl<'p> TypeCx for MatchCheckCtx<'p> {
    type Error = Void;
    type Ty = Ty;
    type VariantIdx = EnumVariantId;
    type StrLit = Void;
    type ArmData = ();
    type PatData = ();

    fn is_exhaustive_patterns_feature_on(&self) -> bool {
        self.exhaustive_patterns
    }

    fn ctor_arity(
        &self,
        ctor: &rustc_pattern_analysis::constructor::Constructor<Self>,
        ty: &Self::Ty,
    ) -> usize {
        match ctor {
            Struct | Variant(_) | UnionField => match *ty.kind(Interner) {
                TyKind::Tuple(arity, ..) => arity,
                TyKind::Adt(AdtId(adt), ..) => {
                    if is_box(self.db, adt) {
                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
                        // patterns. If we're here we can assume this is a box pattern.
                        1
                    } else {
                        let variant = self.variant_id_for_adt(ctor, adt);
                        self.list_variant_nonhidden_fields(ty, variant).count()
                    }
                }
                _ => {
                    never!("Unexpected type for `Single` constructor: {:?}", ty);
                    0
                }
            },
            Ref => 1,
            Slice(..) => unimplemented!(),
            Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..)
            | NonExhaustive | Hidden | Missing | Wildcard => 0,
            Or => {
                never!("The `Or` constructor doesn't have a fixed arity");
                0
            }
        }
    }

    fn ctor_sub_tys(
        &self,
        ctor: &rustc_pattern_analysis::constructor::Constructor<Self>,
        ty: &Self::Ty,
    ) -> &[Self::Ty] {
        use std::iter::once;
        fn alloc<'a>(cx: &'a MatchCheckCtx<'_>, iter: impl Iterator<Item = Ty>) -> &'a [Ty] {
            cx.ty_arena.alloc_extend(iter)
        }
        match ctor {
            Struct | Variant(_) | UnionField => match ty.kind(Interner) {
                TyKind::Tuple(_, substs) => {
                    let tys = substs.iter(Interner).map(|ty| ty.assert_ty_ref(Interner));
                    alloc(self, tys.cloned())
                }
                TyKind::Ref(.., rty) => alloc(self, once(rty.clone())),
                &TyKind::Adt(AdtId(adt), ref substs) => {
                    if is_box(self.db, adt) {
                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
                        // patterns. If we're here we can assume this is a box pattern.
                        let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
                        alloc(self, once(subst_ty))
                    } else {
                        let variant = self.variant_id_for_adt(ctor, adt);
                        let tys = self.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty);
                        alloc(self, tys)
                    }
                }
                ty_kind => {
                    never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind);
                    alloc(self, once(ty.clone()))
                }
            },
            Ref => match ty.kind(Interner) {
                TyKind::Ref(.., rty) => alloc(self, once(rty.clone())),
                ty_kind => {
                    never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind);
                    alloc(self, once(ty.clone()))
                }
            },
            Slice(_) => unreachable!("Found a `Slice` constructor in match checking"),
            Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..)
            | NonExhaustive | Hidden | Missing | Wildcard => &[],
            Or => {
                never!("called `Fields::wildcards` on an `Or` ctor");
                &[]
            }
        }
    }

    fn ctors_for_ty(
        &self,
        ty: &Self::Ty,
    ) -> Result<rustc_pattern_analysis::constructor::ConstructorSet<Self>, Self::Error> {
        let cx = self;

        // Unhandled types are treated as non-exhaustive. Being explicit here instead of falling
        // to catchall arm to ease further implementation.
        let unhandled = || ConstructorSet::Unlistable;

        // This determines the set of all possible constructors for the type `ty`. For numbers,
        // arrays and slices we use ranges and variable-length slices when appropriate.
        //
        // If the `exhaustive_patterns` feature is enabled, we make sure to omit constructors that
        // are statically impossible. E.g., for `Option<!>`, we do not include `Some(_)` in the
        // returned list of constructors.
        // Invariant: this is empty if and only if the type is uninhabited (as determined by
        // `cx.is_uninhabited()`).
        Ok(match ty.kind(Interner) {
            TyKind::Scalar(Scalar::Bool) => ConstructorSet::Bool,
            TyKind::Scalar(Scalar::Char) => unhandled(),
            TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(),
            TyKind::Array(..) | TyKind::Slice(..) => unhandled(),
            TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), subst) => {
                let enum_data = cx.db.enum_data(*enum_id);
                let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty);

                if enum_data.variants.is_empty() && !is_declared_nonexhaustive {
                    ConstructorSet::NoConstructors
                } else {
                    let mut variants = FxHashMap::default();
                    for &(variant, _) in enum_data.variants.iter() {
                        let is_uninhabited =
                            is_enum_variant_uninhabited_from(variant, subst, cx.module, cx.db);
                        let visibility = if is_uninhabited {
                            VariantVisibility::Empty
                        } else {
                            VariantVisibility::Visible
                        };
                        variants.insert(variant, visibility);
                    }

                    ConstructorSet::Variants {
                        variants: IdxContainer(variants),
                        non_exhaustive: is_declared_nonexhaustive,
                    }
                }
            }
            TyKind::Adt(AdtId(hir_def::AdtId::UnionId(_)), _) => ConstructorSet::Union,
            TyKind::Adt(..) | TyKind::Tuple(..) => {
                ConstructorSet::Struct { empty: cx.is_uninhabited(ty) }
            }
            TyKind::Ref(..) => ConstructorSet::Ref,
            TyKind::Never => ConstructorSet::NoConstructors,
            // This type is one for which we cannot list constructors, like `str` or `f64`.
            _ => ConstructorSet::Unlistable,
        })
    }

    fn debug_pat(
        _f: &mut fmt::Formatter<'_>,
        _pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>,
    ) -> fmt::Result {
        unimplemented!()
    }

    fn bug(&self, fmt: fmt::Arguments<'_>) -> ! {
        panic!("{}", fmt)
    }
}

impl<'p> fmt::Debug for MatchCheckCtx<'p> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("MatchCheckCtx").finish()
    }
}
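For a finite scrutinee type, the two questions this `TypeCx` implementation feeds into `rustc_pattern_analysis` can be brute-forced by enumerating values, which makes the definitions concrete: an arm is reachable iff it is useful w.r.t. the arms above it, and a match is exhaustive iff a trailing wildcard would be useless. A toy sketch using ordinary closures, with none of the pattern-analysis machinery; the crate computes the same answers symbolically, constructor by constructor, so it also works for huge or infinite types:

```rust
fn main() {
    // All values of the scrutinee type `Option<bool>`.
    let values = [None, Some(false), Some(true)];
    // Arms as (name, predicate): `None => ..`, `Some(true) => ..`, `Some(_) => ..`
    let arms: &[(&str, fn(&Option<bool>) -> bool)] = &[
        ("None", |v| v.is_none()),
        ("Some(true)", |v| *v == Some(true)),
        ("Some(_)", |v| v.is_some()),
    ];

    let mut matched = vec![false; values.len()];
    for (name, pred) in arms {
        // An arm is useful (reachable) iff it matches a value no earlier arm matched.
        let useful = values.iter().enumerate().any(|(i, v)| pred(v) && !matched[i]);
        println!("{name}: {}", if useful { "reachable" } else { "unreachable" });
        for (i, v) in values.iter().enumerate() {
            matched[i] |= pred(v);
        }
    }
    // The match is exhaustive iff a trailing `_` would be useless, i.e. all matched.
    println!("exhaustive: {}", matched.iter().all(|&m| m));
}
```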
@ -1,824 +0,0 @@
|
|||
//! Based on rust-lang/rust (last sync f31622a50 2021-11-12)
|
||||
//! <https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs>
|
||||
//!
|
||||
//! -----
|
||||
//!
|
||||
//! This file includes the logic for exhaustiveness and reachability checking for pattern-matching.
|
||||
//! Specifically, given a list of patterns for a type, we can tell whether:
|
||||
//! (a) each pattern is reachable (reachability)
|
||||
//! (b) the patterns cover every possible value for the type (exhaustiveness)
|
||||
//!
|
||||
//! The algorithm implemented here is a modified version of the one described in [this
|
||||
//! paper](http://moscova.inria.fr/~maranget/papers/warn/index.html). We have however generalized
|
||||
//! it to accommodate the variety of patterns that Rust supports. We thus explain our version here,
|
||||
//! without being as rigorous.
|
||||
//!
|
||||
//!
|
||||
//! # Summary
|
||||
//!
|
||||
//! The core of the algorithm is the notion of "usefulness". A pattern `q` is said to be *useful*
|
||||
//! relative to another pattern `p` of the same type if there is a value that is matched by `q` and
|
||||
//! not matched by `p`. This generalizes to many `p`s: `q` is useful w.r.t. a list of patterns
|
||||
//! `p_1 .. p_n` if there is a value that is matched by `q` and by none of the `p_i`. We write
|
||||
//! `usefulness(p_1 .. p_n, q)` for a function that returns a list of such values. The aim of this
|
||||
//! file is to compute it efficiently.
|
||||
//!
|
||||
//! This is enough to compute reachability: a pattern in a `match` expression is reachable iff it
|
||||
//! is useful w.r.t. the patterns above it:
|
||||
//! ```rust
|
||||
//! match x {
|
||||
//! Some(_) => ...,
|
||||
//! None => ..., // reachable: `None` is matched by this but not the branch above
|
||||
//! Some(0) => ..., // unreachable: all the values this matches are already matched by
|
||||
//! // `Some(_)` above
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! This is also enough to compute exhaustiveness: a match is exhaustive iff the wildcard `_`
|
||||
//! pattern is _not_ useful w.r.t. the patterns in the match. The values returned by `usefulness`
|
||||
//! are used to tell the user which values are missing.
|
||||
//! ```rust
|
||||
//! match x {
|
||||
//! Some(0) => ...,
|
||||
//! None => ...,
|
||||
//! // not exhaustive: `_` is useful because it matches `Some(1)`
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! The entrypoint of this file is the [`compute_match_usefulness`] function, which computes
|
||||
//! reachability for each match branch and exhaustiveness for the whole match.
|
||||
//!
|
||||
//!
|
||||
//! # Constructors and fields
|
||||
//!
|
||||
//! Note: we will often abbreviate "constructor" as "ctor".
|
||||
//!
|
||||
//! The idea that powers everything that is done in this file is the following: a (matcheable)
|
||||
//! value is made from a constructor applied to a number of subvalues. Examples of constructors are
|
||||
//! `Some`, `None`, `(,)` (the 2-tuple constructor), `Foo {..}` (the constructor for a struct
|
||||
//! `Foo`), and `2` (the constructor for the number `2`). This is natural when we think of
|
||||
//! pattern-matching, and this is the basis for what follows.
|
||||
//!
|
||||
//! Some of the ctors listed above might feel weird: `None` and `2` don't take any arguments.
|
||||
//! That's ok: those are ctors that take a list of 0 arguments; they are the simplest case of
|
||||
//! ctors. We treat `2` as a ctor because `u64` and other number types behave exactly like a huge
|
||||
//! `enum`, with one variant for each number. This allows us to see any matcheable value as made up
|
||||
//! from a tree of ctors, each having a set number of children. For example: `Foo { bar: None,
|
||||
//! baz: Ok(0) }` is made from 4 different ctors, namely `Foo{..}`, `None`, `Ok` and `0`.
|
||||
//!
|
||||
//! This idea can be extended to patterns: they are also made from constructors applied to fields.
|
||||
//! A pattern for a given type is allowed to use all the ctors for values of that type (which we
|
||||
//! call "value constructors"), but there are also pattern-only ctors. The most important one is
|
||||
//! the wildcard (`_`), and the others are integer ranges (`0..=10`), variable-length slices (`[x,
|
||||
//! ..]`), and or-patterns (`Ok(0) | Err(_)`). Examples of valid patterns are `42`, `Some(_)`, `Foo
|
||||
//! { bar: Some(0) | None, baz: _ }`. Note that a binder in a pattern (e.g. `Some(x)`) matches the
|
||||
//! same values as a wildcard (e.g. `Some(_)`), so we treat both as wildcards.
|
||||
//!
|
||||
//! From this deconstruction we can compute whether a given value matches a given pattern; we
|
||||
//! simply look at ctors one at a time. Given a pattern `p` and a value `v`, we want to compute
|
||||
//! `matches!(v, p)`. It's mostly straightforward: we compare the head ctors and when they match
|
||||
//! we compare their fields recursively. A few representative examples:
|
||||
//!
|
||||
//! - `matches!(v, _) := true`
|
||||
//! - `matches!((v0, v1), (p0, p1)) := matches!(v0, p0) && matches!(v1, p1)`
|
||||
//! - `matches!(Foo { bar: v0, baz: v1 }, Foo { bar: p0, baz: p1 }) := matches!(v0, p0) && matches!(v1, p1)`
|
||||
//! - `matches!(Ok(v0), Ok(p0)) := matches!(v0, p0)`
|
||||
//! - `matches!(Ok(v0), Err(p0)) := false` (incompatible variants)
|
||||
//! - `matches!(v, 1..=100) := matches!(v, 1) || ... || matches!(v, 100)`
|
||||
//! - `matches!([v0], [p0, .., p1]) := false` (incompatible lengths)
|
||||
//! - `matches!([v0, v1, v2], [p0, .., p1]) := matches!(v0, p0) && matches!(v2, p1)`
|
||||
//! - `matches!(v, p0 | p1) := matches!(v, p0) || matches!(v, p1)`
|
||||
//!
|
||||
//! Constructors, fields and relevant operations are defined in the [`super::deconstruct_pat`] module.
|
||||
//!
|
||||
//! Note: this constructors/fields distinction may not straightforwardly apply to every Rust type.
|
||||
//! For example a value of type `Rc<u64>` can't be deconstructed that way, and `&str` has an
|
||||
//! infinitude of constructors. There are also subtleties with visibility of fields and
|
||||
//! uninhabitedness and various other things. The constructors idea can be extended to handle most
|
||||
//! of these subtleties though; caveats are documented where relevant throughout the code.
|
||||
//!
|
||||
//! Whether constructors cover each other is computed by [`Constructor::is_covered_by`].
|
||||
//!
|
||||
//!
|
||||
//! # Specialization
|
||||
//!
|
||||
//! Recall that we wish to compute `usefulness(p_1 .. p_n, q)`: given a list of patterns `p_1 ..
|
||||
//! p_n` and a pattern `q`, all of the same type, we want to find a list of values (called
|
||||
//! "witnesses") that are matched by `q` and by none of the `p_i`. We obviously don't just
|
||||
//! enumerate all possible values. From the discussion above we see that we can proceed
|
||||
//! ctor-by-ctor: for each value ctor of the given type, we ask "is there a value that starts with
|
||||
//! this constructor and matches `q` and none of the `p_i`?". As we saw above, there's a lot we can
|
||||
//! say from knowing only the first constructor of our candidate value.
|
||||
//!
|
||||
//! Let's take the following example:
|
||||
//! ```
|
||||
//! match x {
|
||||
//! Enum::Variant1(_) => {} // `p1`
|
||||
//! Enum::Variant2(None, 0) => {} // `p2`
|
||||
//! Enum::Variant2(Some(_), 0) => {} // `q`
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! We can easily see that if our candidate value `v` starts with `Variant1` it will not match `q`.
|
||||
//! If `v = Variant2(v0, v1)` however, whether or not it matches `p2` and `q` will depend on `v0`
|
||||
//! and `v1`. In fact, such a `v` will be a witness of usefulness of `q` exactly when the tuple
|
||||
//! `(v0, v1)` is a witness of usefulness of `q'` in the following reduced match:
|
||||
//!
|
||||
//! ```
|
||||
//! match x {
|
||||
//! (None, 0) => {} // `p2'`
|
||||
//! (Some(_), 0) => {} // `q'`
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! This motivates a new step in computing usefulness, that we call _specialization_.
|
||||
//! Specialization consist of filtering a list of patterns for those that match a constructor, and
|
||||
//! then looking into the constructor's fields. This enables usefulness to be computed recursively.
|
||||
//!
|
||||
//! Instead of acting on a single pattern in each row, we will consider a list of patterns for each
|
||||
//! row, and we call such a list a _pattern-stack_. The idea is that we will specialize the
|
||||
//! leftmost pattern, which amounts to popping the constructor and pushing its fields, which feels
|
||||
//! like a stack. We note a pattern-stack simply with `[p_1 ... p_n]`.
|
||||
//! Here's a sequence of specializations of a list of pattern-stacks, to illustrate what's
|
||||
//! happening:
|
||||
//! ```
|
||||
//! [Enum::Variant1(_)]
|
||||
//! [Enum::Variant2(None, 0)]
|
||||
//! [Enum::Variant2(Some(_), 0)]
|
||||
//! //==>> specialize with `Variant2`
|
||||
//! [None, 0]
|
||||
//! [Some(_), 0]
|
||||
//! //==>> specialize with `Some`
|
||||
//! [_, 0]
|
||||
//! //==>> specialize with `true` (say the type was `bool`)
|
||||
//! [0]
|
||||
//! //==>> specialize with `0`
|
||||
//! []
|
||||
//! ```
|
||||
//!
|
||||
//! The function `specialize(c, p)` takes a value constructor `c` and a pattern `p`, and returns 0
|
||||
//! or more pattern-stacks. If `c` does not match the head constructor of `p`, it returns nothing;
|
||||
//! otherwise if returns the fields of the constructor. This only returns more than one
|
||||
//! pattern-stack if `p` has a pattern-only constructor.
|
||||
//!
|
||||
//! - Specializing for the wrong constructor returns nothing
|
||||
//!
|
||||
//! `specialize(None, Some(p0)) := []`
|
||||
//!
|
||||
//! - Specializing for the correct constructor returns a single row with the fields
|
||||
//!
|
||||
//! `specialize(Variant1, Variant1(p0, p1, p2)) := [[p0, p1, p2]]`
|
||||
//!
|
||||
//! `specialize(Foo{..}, Foo { bar: p0, baz: p1 }) := [[p0, p1]]`
|
||||
//!
|
||||
//! - For or-patterns, we specialize each branch and concatenate the results
|
||||
//!
|
||||
//! `specialize(c, p0 | p1) := specialize(c, p0) ++ specialize(c, p1)`
|
||||
//!
|
||||
//! - We treat the other pattern constructors as if they were a large or-pattern of all the
|
||||
//! possibilities:
|
||||
//!
|
||||
//! `specialize(c, _) := specialize(c, Variant1(_) | Variant2(_, _) | ...)`
|
||||
//!
|
||||
//! `specialize(c, 1..=100) := specialize(c, 1 | ... | 100)`
|
||||
//!
|
||||
//! `specialize(c, [p0, .., p1]) := specialize(c, [p0, p1] | [p0, _, p1] | [p0, _, _, p1] | ...)`
|
||||
//!
|
||||
//! - If `c` is a pattern-only constructor, `specialize` is defined on a case-by-case basis. See
|
||||
//! the discussion about constructor splitting in [`super::deconstruct_pat`].
|
||||
//!
|
||||
//!
|
||||
//! We then extend this function to work with pattern-stacks as input, by acting on the first
|
||||
//! column and keeping the other columns untouched.
|
||||
//!
|
||||
//! Specialization for the whole matrix is done in [`Matrix::specialize_constructor`]. Note that
|
||||
//! or-patterns in the first column are expanded before being stored in the matrix. Specialization
|
||||
//! for a single patstack is done from a combination of [`Constructor::is_covered_by`] and
|
||||
//! [`PatStack::pop_head_constructor`]. The internals of how it's done mostly live in the
|
||||
//! [`Fields`] struct.
|
||||
//!
|
||||
//!
|
||||
//! # Computing usefulness
|
||||
//!
|
||||
//! We now have all we need to compute usefulness. The inputs to usefulness are a list of
|
||||
//! pattern-stacks `p_1 ... p_n` (one per row), and a new pattern_stack `q`. The paper and this
|
||||
//! file calls the list of patstacks a _matrix_. They must all have the same number of columns and
|
||||
//! the patterns in a given column must all have the same type. `usefulness` returns a (possibly
|
||||
//! empty) list of witnesses of usefulness. These witnesses will also be pattern-stacks.
|
||||
//!
|
||||
//! - base case: `n_columns == 0`.
|
||||
//! Since a pattern-stack functions like a tuple of patterns, an empty one functions like the
|
||||
//! unit type. Thus `q` is useful iff there are no rows above it, i.e. if `n == 0`.
|
||||
//!
|
||||
//! - inductive case: `n_columns > 0`.
|
||||
//! We need a way to list the constructors we want to try. We will be more clever in the next
|
||||
//! section but for now assume we list all value constructors for the type of the first column.
|
||||
//!
|
||||
//! - for each such ctor `c`:
|
||||
//!
|
||||
//! - for each `q'` returned by `specialize(c, q)`:
|
||||
//!
|
||||
//! - we compute `usefulness(specialize(c, p_1) ... specialize(c, p_n), q')`
|
||||
//!
|
||||
//! - for each witness found, we revert specialization by pushing the constructor `c` on top.
|
||||
//!
|
||||
//! - We return the concatenation of all the witnesses found, if any.
|
||||
//!
|
||||
//! Example:
|
||||
//! ```
|
||||
//! [Some(true)] // p_1
|
||||
//! [None] // p_2
|
||||
//! [Some(_)] // q
|
||||
//! //==>> try `None`: `specialize(None, q)` returns nothing
|
||||
//! //==>> try `Some`: `specialize(Some, q)` returns a single row
|
||||
//! [true] // p_1'
|
||||
//! [_] // q'
|
||||
//! //==>> try `true`: `specialize(true, q')` returns a single row
|
||||
//! [] // p_1''
|
||||
//! [] // q''
|
||||
//! //==>> base case; `n != 0` so `q''` is not useful.
|
||||
//! //==>> go back up a step
|
||||
//! [true] // p_1'
|
||||
//! [_] // q'
|
||||
//! //==>> try `false`: `specialize(false, q')` returns a single row
|
||||
//! [] // q''
|
||||
//! //==>> base case; `n == 0` so `q''` is useful. We return the single witness `[]`
|
||||
//! witnesses:
|
||||
//! []
|
||||
//! //==>> undo the specialization with `false`
|
||||
//! witnesses:
|
||||
//! [false]
|
||||
//! //==>> undo the specialization with `Some`
|
||||
//! witnesses:
|
||||
//! [Some(false)]
|
||||
//! //==>> we have tried all the constructors. The output is the single witness `[Some(false)]`.
|
||||
//! ```
|
||||
//!
|
||||
//! This computation is done in [`is_useful`]. In practice we don't care about the list of
|
||||
//! witnesses when computing reachability; we only need to know whether any exist. We do keep the
|
||||
//! witnesses when computing exhaustiveness to report them to the user.
|
||||
//!
|
||||
//!
|
||||
//! # Making usefulness tractable: constructor splitting
|
||||
//!
|
||||
//! We're missing one last detail: which constructors do we list? Naively listing all value
|
||||
//! constructors cannot work for types like `u64` or `&str`, so we need to be more clever. The
|
||||
//! first obvious insight is that we only want to list constructors that are covered by the head
|
||||
//! constructor of `q`. If it's a value constructor, we only try that one. If it's a pattern-only
|
||||
//! constructor, we use the final clever idea for this algorithm: _constructor splitting_, where we
|
||||
//! group together constructors that behave the same.
|
||||
//!
|
||||
//! The details are not necessary to understand this file, so we explain them in
|
||||
//! [`super::deconstruct_pat`]. Splitting is done by the [`Constructor::split`] function.
|
||||
|
||||
use std::iter::once;
|
||||
|
||||
use hir_def::{AdtId, DefWithBodyId, HasModule, ModuleId};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
use typed_arena::Arena;
|
||||
|
||||
use crate::{db::HirDatabase, inhabitedness::is_ty_uninhabited_from, Ty, TyExt};
|
||||
|
||||
use super::deconstruct_pat::{Constructor, DeconstructedPat, Fields, SplitWildcard};
|
||||
|
||||
use self::{helper::Captures, ArmType::*, Usefulness::*};
|
||||
|
||||
pub(crate) struct MatchCheckCtx<'a, 'p> {
|
||||
pub(crate) module: ModuleId,
|
||||
pub(crate) body: DefWithBodyId,
|
||||
pub(crate) db: &'a dyn HirDatabase,
|
||||
/// Lowered patterns from arms plus generated by the check.
|
||||
pub(crate) pattern_arena: &'p Arena<DeconstructedPat<'p>>,
|
||||
exhaustive_patterns: bool,
|
||||
}
|
||||
|
||||
impl<'a, 'p> MatchCheckCtx<'a, 'p> {
|
||||
pub(crate) fn new(
|
||||
module: ModuleId,
|
||||
body: DefWithBodyId,
|
||||
db: &'a dyn HirDatabase,
|
||||
pattern_arena: &'p Arena<DeconstructedPat<'p>>,
|
||||
) -> Self {
|
||||
let def_map = db.crate_def_map(module.krate());
|
||||
let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns");
|
||||
Self { module, body, db, pattern_arena, exhaustive_patterns }
|
||||
}
|
||||
|
||||
pub(super) fn is_uninhabited(&self, ty: &Ty) -> bool {
|
||||
if self.feature_exhaustive_patterns() {
|
||||
is_ty_uninhabited_from(ty, self.module, self.db)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
|
||||
pub(super) fn is_foreign_non_exhaustive_enum(&self, ty: &Ty) -> bool {
|
||||
match ty.as_adt() {
|
||||
Some((adt @ AdtId::EnumId(_), _)) => {
|
||||
let has_non_exhaustive_attr =
|
||||
self.db.attrs(adt.into()).by_key("non_exhaustive").exists();
|
||||
let is_local = adt.module(self.db.upcast()).krate() == self.module.krate();
|
||||
has_non_exhaustive_attr && !is_local
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
// Rust's unstable feature described as "Allows exhaustive pattern matching on types that contain uninhabited types."
|
||||
pub(super) fn feature_exhaustive_patterns(&self) -> bool {
|
||||
self.exhaustive_patterns
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub(super) struct PatCtxt<'a, 'p> {
|
||||
pub(super) cx: &'a MatchCheckCtx<'a, 'p>,
|
||||
/// Type of the current column under investigation.
|
||||
pub(super) ty: &'a Ty,
|
||||
/// Whether the current pattern is the whole pattern as found in a match arm, or if it's a
|
||||
/// subpattern.
|
||||
pub(super) is_top_level: bool,
|
||||
/// Whether the current pattern is from a `non_exhaustive` enum.
|
||||
pub(super) is_non_exhaustive: bool,
|
||||
}
|
||||
|
||||
/// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec[_; 2]`
|
||||
/// works well.
|
||||
#[derive(Clone)]
|
||||
pub(super) struct PatStack<'p> {
|
||||
pats: SmallVec<[&'p DeconstructedPat<'p>; 2]>,
|
||||
}
|
||||
|
||||
impl<'p> PatStack<'p> {
|
||||
fn from_pattern(pat: &'p DeconstructedPat<'p>) -> Self {
|
||||
Self::from_vec(smallvec![pat])
|
||||
}
|
||||
|
||||
fn from_vec(vec: SmallVec<[&'p DeconstructedPat<'p>; 2]>) -> Self {
|
||||
PatStack { pats: vec }
|
||||
}
|
||||
|
||||
fn is_empty(&self) -> bool {
|
||||
self.pats.is_empty()
|
||||
}
|
||||
|
||||
fn len(&self) -> usize {
|
||||
self.pats.len()
|
||||
}
|
||||
|
||||
fn head(&self) -> &'p DeconstructedPat<'p> {
|
||||
self.pats[0]
|
||||
}
|
||||
|
||||
// Recursively expand the first pattern into its subpatterns. Only useful if the pattern is an
|
||||
// or-pattern. Panics if `self` is empty.
|
||||
fn expand_or_pat(&self) -> impl Iterator<Item = PatStack<'p>> + Captures<'_> {
|
||||
self.head().iter_fields().map(move |pat| {
|
||||
let mut new_patstack = PatStack::from_pattern(pat);
|
||||
new_patstack.pats.extend_from_slice(&self.pats[1..]);
|
||||
new_patstack
|
||||
})
|
||||
}
|
||||
|
||||
/// This computes `S(self.head().ctor(), self)`. See top of the file for explanations.
|
||||
///
|
||||
/// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
|
||||
/// fields filled with wild patterns.
|
||||
///
|
||||
/// This is roughly the inverse of `Constructor::apply`.
|
||||
fn pop_head_constructor(&self, cx: &MatchCheckCtx<'_, 'p>, ctor: &Constructor) -> PatStack<'p> {
|
||||
// We pop the head pattern and push the new fields extracted from the arguments of
|
||||
// `self.head()`.
|
||||
let mut new_fields: SmallVec<[_; 2]> = self.head().specialize(cx, ctor);
|
||||
new_fields.extend_from_slice(&self.pats[1..]);
|
||||
PatStack::from_vec(new_fields)
|
||||
}
|
||||
}
|
||||
|
||||
/// A 2D matrix.
|
||||
#[derive(Clone)]
|
||||
pub(super) struct Matrix<'p> {
|
||||
patterns: Vec<PatStack<'p>>,
|
||||
}
|
||||
|
||||
impl<'p> Matrix<'p> {
|
||||
fn empty() -> Self {
|
||||
Matrix { patterns: vec![] }
|
||||
}
|
||||
|
||||
/// Number of columns of this matrix. `None` is the matrix is empty.
|
||||
pub(super) fn _column_count(&self) -> Option<usize> {
|
||||
self.patterns.first().map(|r| r.len())
|
||||
}
|
||||
|
||||
/// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively
|
||||
/// expands it.
|
||||
fn push(&mut self, row: PatStack<'p>) {
|
||||
if !row.is_empty() && row.head().is_or_pat() {
|
||||
self.patterns.extend(row.expand_or_pat());
|
||||
} else {
|
||||
self.patterns.push(row);
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterate over the first component of each row
|
||||
fn heads(&self) -> impl Iterator<Item = &'p DeconstructedPat<'p>> + Clone + Captures<'_> {
|
||||
self.patterns.iter().map(|r| r.head())
|
||||
}
|
||||
|
||||
/// This computes `S(constructor, self)`. See top of the file for explanations.
|
||||
fn specialize_constructor(&self, pcx: PatCtxt<'_, 'p>, ctor: &Constructor) -> Matrix<'p> {
|
||||
let mut matrix = Matrix::empty();
|
||||
for row in &self.patterns {
|
||||
if ctor.is_covered_by(pcx, row.head().ctor()) {
|
||||
let new_row = row.pop_head_constructor(pcx.cx, ctor);
|
||||
matrix.push(new_row);
|
||||
}
|
||||
}
|
||||
matrix
|
||||
}
|
||||
}
|
||||
|
||||
/// This carries the results of computing usefulness, as described at the top of the file. When
|
||||
/// checking usefulness of a match branch, we use the `NoWitnesses` variant, which also keeps track
|
||||
/// of potential unreachable sub-patterns (in the presence of or-patterns). When checking
|
||||
/// exhaustiveness of a whole match, we use the `WithWitnesses` variant, which carries a list of
|
||||
/// witnesses of non-exhaustiveness when there are any.
|
||||
/// Which variant to use is dictated by `ArmType`.
|
||||
enum Usefulness<'p> {
|
||||
/// If we don't care about witnesses, simply remember if the pattern was useful.
|
||||
NoWitnesses { useful: bool },
|
||||
/// Carries a list of witnesses of non-exhaustiveness. If empty, indicates that the whole
|
||||
/// pattern is unreachable.
|
||||
WithWitnesses(Vec<Witness<'p>>),
|
||||
}
|
||||
|
||||
impl<'p> Usefulness<'p> {
|
||||
fn new_useful(preference: ArmType) -> Self {
|
||||
match preference {
|
||||
// A single (empty) witness of reachability.
|
||||
FakeExtraWildcard => WithWitnesses(vec![Witness(vec![])]),
|
||||
RealArm => NoWitnesses { useful: true },
|
||||
}
|
||||
}
|
||||
fn new_not_useful(preference: ArmType) -> Self {
|
||||
match preference {
|
||||
FakeExtraWildcard => WithWitnesses(vec![]),
|
||||
RealArm => NoWitnesses { useful: false },
|
||||
}
|
||||
}
|
||||
|
||||
fn is_useful(&self) -> bool {
|
||||
match self {
|
||||
Usefulness::NoWitnesses { useful } => *useful,
|
||||
Usefulness::WithWitnesses(witnesses) => !witnesses.is_empty(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Combine usefulnesses from two branches. This is an associative operation.
|
||||
fn extend(&mut self, other: Self) {
|
||||
match (&mut *self, other) {
|
||||
(WithWitnesses(_), WithWitnesses(o)) if o.is_empty() => {}
|
||||
(WithWitnesses(s), WithWitnesses(o)) if s.is_empty() => *self = WithWitnesses(o),
|
||||
(WithWitnesses(s), WithWitnesses(o)) => s.extend(o),
|
||||
(NoWitnesses { useful: s_useful }, NoWitnesses { useful: o_useful }) => {
|
||||
*s_useful = *s_useful || o_useful
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
/// After calculating usefulness after a specialization, call this to reconstruct a usefulness
|
||||
/// that makes sense for the matrix pre-specialization. This new usefulness can then be merged
|
||||
/// with the results of specializing with the other constructors.
|
||||
fn apply_constructor(
|
||||
self,
|
||||
pcx: PatCtxt<'_, 'p>,
|
||||
matrix: &Matrix<'p>,
|
||||
ctor: &Constructor,
|
||||
) -> Self {
|
||||
match self {
|
||||
NoWitnesses { .. } => self,
|
||||
WithWitnesses(ref witnesses) if witnesses.is_empty() => self,
|
||||
WithWitnesses(witnesses) => {
|
||||
let new_witnesses = if let Constructor::Missing { .. } = ctor {
|
||||
// We got the special `Missing` constructor, so each of the missing constructors
|
||||
// gives a new pattern that is not caught by the match. We list those patterns.
|
||||
let new_patterns = if pcx.is_non_exhaustive {
|
||||
// Here we don't want the user to try to list all variants, we want them to add
|
||||
// a wildcard, so we only suggest that.
|
||||
vec![DeconstructedPat::wildcard(pcx.ty.clone())]
|
||||
} else {
|
||||
let mut split_wildcard = SplitWildcard::new(pcx);
|
||||
split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
|
||||
|
||||
// This lets us know if we skipped any variants because they are marked
|
||||
// `doc(hidden)` or they are unstable feature gate (only stdlib types).
|
||||
let mut hide_variant_show_wild = false;
|
||||
// Construct for each missing constructor a "wild" version of this
|
||||
// constructor, that matches everything that can be built with
|
||||
// it. For example, if `ctor` is a `Constructor::Variant` for
|
||||
// `Option::Some`, we get the pattern `Some(_)`.
|
||||
let mut new: Vec<DeconstructedPat<'_>> = split_wildcard
|
||||
.iter_missing(pcx)
|
||||
.filter_map(|missing_ctor| {
|
||||
// Check if this variant is marked `doc(hidden)`
|
||||
if missing_ctor.is_doc_hidden_variant(pcx)
|
||||
|| missing_ctor.is_unstable_variant(pcx)
|
||||
{
|
||||
hide_variant_show_wild = true;
|
||||
return None;
|
||||
}
|
||||
Some(DeconstructedPat::wild_from_ctor(pcx, missing_ctor.clone()))
|
||||
})
|
||||
.collect();
|
||||
|
||||
if hide_variant_show_wild {
|
||||
new.push(DeconstructedPat::wildcard(pcx.ty.clone()))
|
||||
}
|
||||
|
||||
new
|
||||
};
|
||||
|
||||
witnesses
|
||||
.into_iter()
|
||||
.flat_map(|witness| {
|
||||
new_patterns.iter().map(move |pat| {
|
||||
Witness(
|
||||
witness
|
||||
.0
|
||||
.iter()
|
||||
.chain(once(pat))
|
||||
.map(DeconstructedPat::clone_and_forget_reachability)
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
witnesses
|
||||
.into_iter()
|
||||
.map(|witness| witness.apply_constructor(pcx, ctor))
|
||||
.collect()
|
||||
};
|
||||
WithWitnesses(new_witnesses)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum ArmType {
|
||||
FakeExtraWildcard,
|
||||
RealArm,
|
||||
}
|
||||
|
||||
/// A witness of non-exhaustiveness for error reporting, represented
/// as a list of patterns (in reverse order of construction) with
/// wildcards inside to represent elements that can take any inhabitant
/// of the type as a value.
///
/// A witness against a list of patterns should have the same types
/// and length as the pattern matched against. Because Rust `match`
/// is always against a single pattern, at the end the witness will
/// have length 1, but in the middle of the algorithm, it can contain
/// multiple patterns.
///
/// For example, if we are constructing a witness for the match against
///
/// ```
/// struct Pair(Option<(u32, u32)>, bool);
///
/// match (p: Pair) {
///     Pair(None, _) => {}
///     Pair(_, false) => {}
/// }
/// ```
///
/// We'll perform the following steps:
/// 1. Start with an empty witness
///     `Witness(vec![])`
/// 2. Push a witness `true` against the `false`
///     `Witness(vec![true])`
/// 3. Push a witness `Some(_)` against the `None`
///     `Witness(vec![true, Some(_)])`
/// 4. Apply the `Pair` constructor to the witnesses
///     `Witness(vec![Pair(Some(_), true)])`
///
/// The final `Pair(Some(_), true)` is then the resulting witness.
pub(crate) struct Witness<'p>(Vec<DeconstructedPat<'p>>);

impl<'p> Witness<'p> {
    /// Asserts that the witness contains a single pattern, and returns it.
    fn single_pattern(self) -> DeconstructedPat<'p> {
        assert_eq!(self.0.len(), 1);
        self.0.into_iter().next().unwrap()
    }

    /// Constructs a partial witness for a pattern given a list of
    /// patterns expanded by the specialization step.
    ///
    /// When a pattern P is discovered to be useful, this function is used bottom-up
    /// to reconstruct a complete witness, e.g., a pattern P' that covers a subset
    /// of values, V, where each value in that set is not covered by any previously
    /// used patterns and is covered by the pattern P'. Examples:
    ///
    /// left_ty: tuple of 3 elements
    /// pats: [10, 20, _]           => (10, 20, _)
    ///
    /// left_ty: struct X { a: (bool, &'static str), b: usize}
    /// pats: [(false, "foo"), 42]  => X { a: (false, "foo"), b: 42 }
    fn apply_constructor(mut self, pcx: PatCtxt<'_, 'p>, ctor: &Constructor) -> Self {
        let pat = {
            let len = self.0.len();
            let arity = ctor.arity(pcx);
            let pats = self.0.drain((len - arity)..).rev();
            let fields = Fields::from_iter(pcx.cx, pats);
            DeconstructedPat::new(ctor.clone(), fields, pcx.ty.clone())
        };

        self.0.push(pat);

        self
    }
}

/// Algorithm from <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
/// The algorithm from the paper has been modified to correctly handle empty
/// types. The changes are:
/// (0) We don't exit early if the pattern matrix has zero rows. We just
///     continue to recurse over columns.
/// (1) all_constructors will only return constructors that are statically
///     possible. E.g., it will only return `Ok` for `Result<T, !>`.
///
/// This finds whether a (row) vector `v` of patterns is 'useful' in relation
/// to a set of such vectors `m` - this is defined as there being a set of
/// inputs that will match `v` but not any of the sets in `m`.
///
/// All the patterns at each column of the `matrix ++ v` matrix must have the same type.
///
/// This is used both for reachability checking (if a pattern isn't useful in
/// relation to preceding patterns, it is not reachable) and exhaustiveness
/// checking (if a wildcard pattern is useful in relation to a matrix, the
/// matrix isn't exhaustive).
///
/// `is_under_guard` is used to inform if the pattern has a guard. If it
/// has one it must not be inserted into the matrix. This shouldn't be
/// relied on for soundness.
fn is_useful<'p>(
    cx: &MatchCheckCtx<'_, 'p>,
    matrix: &Matrix<'p>,
    v: &PatStack<'p>,
    witness_preference: ArmType,
    is_under_guard: bool,
    is_top_level: bool,
) -> Usefulness<'p> {
    let Matrix { patterns: rows, .. } = matrix;

    // The base case. We are pattern-matching on () and the return value is
    // based on whether our matrix has a row or not.
    // NOTE: This could potentially be optimized by checking rows.is_empty()
    // first and then, if v is non-empty, the return value is based on whether
    // the type of the tuple we're checking is inhabited or not.
    if v.is_empty() {
        let ret = if rows.is_empty() {
            Usefulness::new_useful(witness_preference)
        } else {
            Usefulness::new_not_useful(witness_preference)
        };
        return ret;
    }

    debug_assert!(rows.iter().all(|r| r.len() == v.len()));

    let ty = v.head().ty();
    let is_non_exhaustive = cx.is_foreign_non_exhaustive_enum(ty);
    let pcx = PatCtxt { cx, ty, is_top_level, is_non_exhaustive };

    // If the first pattern is an or-pattern, expand it.
    let mut ret = Usefulness::new_not_useful(witness_preference);
    if v.head().is_or_pat() {
        // We try each or-pattern branch in turn.
        let mut matrix = matrix.clone();
        for v in v.expand_or_pat() {
            let usefulness = is_useful(cx, &matrix, &v, witness_preference, is_under_guard, false);
            ret.extend(usefulness);
            // If pattern has a guard don't add it to the matrix.
            if !is_under_guard {
                // We push the already-seen patterns into the matrix in order to detect redundant
                // branches like `Some(_) | Some(0)`.
                matrix.push(v);
            }
        }
    } else {
        let v_ctor = v.head().ctor();

        // FIXME: implement `overlapping_range_endpoints` lint

        // We split the head constructor of `v`.
        let split_ctors = v_ctor.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
        // For each constructor, we compute whether there's a value that starts with it that would
        // witness the usefulness of `v`.
        let start_matrix = matrix;
        for ctor in split_ctors {
            // We cache the result of `Fields::wildcards` because it is used a lot.
            let spec_matrix = start_matrix.specialize_constructor(pcx, &ctor);
            let v = v.pop_head_constructor(cx, &ctor);
            let usefulness =
                is_useful(cx, &spec_matrix, &v, witness_preference, is_under_guard, false);
            let usefulness = usefulness.apply_constructor(pcx, start_matrix, &ctor);

            // FIXME: implement `non_exhaustive_omitted_patterns` lint

            ret.extend(usefulness);
        }
    };

    if ret.is_useful() {
        v.head().set_reachable();
    }

    ret
}
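// Illustration of the recursion: for `x: Option<u8>` with arms `Some(0)` and
// `None`, exhaustiveness runs `is_useful` on the fake wildcard row `[_]`
// against the matrix `[[Some(0)], [None]]`. Splitting `_` into `Some` and
// `None`: the `None` case is covered, but specializing with `Some` reduces the
// problem to `[_]` against `[[0]]`, where the range `1..=255` is uncovered.
// The wildcard row is therefore useful, and unwinding `apply_constructor`
// rebuilds the witness `Some(1..=255)`: the match is non-exhaustive.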

/// The arm of a match expression.
#[derive(Clone, Copy)]
pub(crate) struct MatchArm<'p> {
    pub(crate) pat: &'p DeconstructedPat<'p>,
    pub(crate) has_guard: bool,
}

/// Indicates whether or not a given arm is reachable.
#[derive(Clone, Debug)]
pub(crate) enum Reachability {
    /// The arm is reachable. This additionally carries a set of or-pattern branches that have been
    /// found to be unreachable despite the overall arm being reachable. Used only in the presence
    /// of or-patterns, otherwise it stays empty.
    // FIXME: store unreachable subpattern IDs
    Reachable,
    /// The arm is unreachable.
    Unreachable,
}

/// The output of checking a match for exhaustiveness and arm reachability.
pub(crate) struct UsefulnessReport<'p> {
    /// For each arm of the input, whether that arm is reachable after the arms above it.
    pub(crate) _arm_usefulness: Vec<(MatchArm<'p>, Reachability)>,
    /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
    /// exhaustiveness.
    pub(crate) non_exhaustiveness_witnesses: Vec<DeconstructedPat<'p>>,
}

/// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which
/// of its arms are reachable.
///
/// Note: the input patterns must have been lowered through
/// `check_match::MatchVisitor::lower_pattern`.
pub(crate) fn compute_match_usefulness<'p>(
    cx: &MatchCheckCtx<'_, 'p>,
    arms: &[MatchArm<'p>],
    scrut_ty: &Ty,
) -> UsefulnessReport<'p> {
    let mut matrix = Matrix::empty();
    let arm_usefulness = arms
        .iter()
        .copied()
        .map(|arm| {
            let v = PatStack::from_pattern(arm.pat);
            is_useful(cx, &matrix, &v, RealArm, arm.has_guard, true);
            if !arm.has_guard {
                matrix.push(v);
            }
            let reachability = if arm.pat.is_reachable() {
                Reachability::Reachable
            } else {
                Reachability::Unreachable
            };
            (arm, reachability)
        })
        .collect();

    let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty.clone()));
    let v = PatStack::from_pattern(wild_pattern);
    let usefulness = is_useful(cx, &matrix, &v, FakeExtraWildcard, false, true);
    let non_exhaustiveness_witnesses = match usefulness {
        WithWitnesses(pats) => pats.into_iter().map(Witness::single_pattern).collect(),
        NoWitnesses { .. } => panic!("bug"),
    };
    UsefulnessReport { _arm_usefulness: arm_usefulness, non_exhaustiveness_witnesses }
}

pub(crate) mod helper {
    // Copy-pasted from rust/compiler/rustc_data_structures/src/captures.rs
    /// "Signaling" trait used in impl trait to tag lifetimes that you may
    /// need to capture but don't really need for other reasons.
    /// Basically a workaround; see [this comment] for details.
    ///
    /// [this comment]: https://github.com/rust-lang/rust/issues/34511#issuecomment-373423999
    // FIXME(eddyb) false positive, the lifetime parameter is "phantom" but needed.
    #[allow(unused_lifetimes)]
    pub(crate) trait Captures<'a> {}

    impl<'a, T: ?Sized> Captures<'a> for T {}
}

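The code above reduces exhaustiveness to the question "is a wildcard row useful?". A minimal, self-contained sketch of the same idea on a toy one-column boolean domain (names invented for the sketch; it enumerates values instead of specializing a matrix):

#[derive(Clone, Copy, Debug, PartialEq)]
enum Pat {
    Wild,       // `_`
    Bool(bool), // `true` / `false`
}

impl Pat {
    fn matches(self, value: bool) -> bool {
        match self {
            Pat::Wild => true,
            Pat::Bool(b) => b == value,
        }
    }
}

/// A witness of non-exhaustiveness here is simply a value no arm matches.
fn find_witness(arms: &[Pat]) -> Option<bool> {
    [false, true].into_iter().find(|&v| arms.iter().all(|p| !p.matches(v)))
}

fn main() {
    // `match x { true => .. }` is non-exhaustive; `false` is the witness.
    assert_eq!(find_witness(&[Pat::Bool(true)]), Some(false));
    // Adding `_` makes it exhaustive: no witness exists.
    assert_eq!(find_witness(&[Pat::Bool(true), Pat::Wild]), None);
}
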
@@ -15,6 +15,9 @@ extern crate rustc_abi;
#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_abi as rustc_abi;

// No need to use the in-tree one.
extern crate ra_ap_rustc_pattern_analysis as rustc_pattern_analysis;

mod builder;
mod chalk_db;
mod chalk_ext;

@@ -38,10 +41,10 @@ pub mod mir;
pub mod primitive;
pub mod traits;

#[cfg(test)]
mod tests;
#[cfg(test)]
mod test_db;
#[cfg(test)]
mod tests;

use std::{
    collections::hash_map::Entry,

@@ -21,11 +21,11 @@ use hir_def::{
};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};

mod borrowck;
mod eval;
mod lower;
mod borrowck;
mod pretty;
mod monomorphization;
mod pretty;

pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
pub use eval::{

@@ -1,14 +1,14 @@
mod never_type;
mod coercion;
mod regression;
mod simple;
mod patterns;
mod traits;
mod method_resolution;
mod macros;
mod diagnostics;
mod display_source_code;
mod incremental;
mod diagnostics;
mod macros;
mod method_resolution;
mod never_type;
mod patterns;
mod regression;
mod simple;
mod traits;

use std::{collections::HashMap, env};

@@ -2,7 +2,6 @@

use std::ops::ControlFlow;

use base_db::FileId;
use hir_def::{
    attr::AttrsWithOwner,
    item_scope::ItemInNs,
@@ -11,12 +10,8 @@ use hir_def::{
    resolver::{HasResolver, Resolver, TypeNs},
    AssocItemId, AttrDefId, ModuleDefId,
};
use hir_expand::{
    name::Name,
    span_map::{RealSpanMap, SpanMapRef},
};
use hir_expand::{mod_path::PathKind, name::Name};
use hir_ty::{db::HirDatabase, method_resolution};
use syntax::{ast, AstNode};

use crate::{
    Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
@@ -129,7 +124,7 @@ fn resolve_doc_path_on_(
        AttrDefId::GenericParamId(_) => return None,
    };

    let mut modpath = modpath_from_str(db, link)?;
    let mut modpath = modpath_from_str(link)?;

    let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
    if resolved.is_none() {
@@ -305,34 +300,37 @@ fn as_module_def_if_namespace_matches(
    (ns.unwrap_or(expected_ns) == expected_ns).then(|| DocLinkDef::ModuleDef(def))
}

fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
fn modpath_from_str(link: &str) -> Option<ModPath> {
    // FIXME: this is not how we should get a mod path here.
    let try_get_modpath = |link: &str| {
        let ast_path = ast::SourceFile::parse(&format!("type T = {link};"))
            .syntax_node()
            .descendants()
            .find_map(ast::Path::cast)?;
        if ast_path.syntax().text() != link {
            return None;
        }
        ModPath::from_src(
            db.upcast(),
            ast_path,
            SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::BOGUS)),
        )
        let mut parts = link.split("::");
        let mut first_segment = None;
        let kind = match parts.next()? {
            "" => PathKind::Abs,
            "crate" => PathKind::Crate,
            "self" => PathKind::Super(0),
            "super" => {
                let mut deg = 1;
                while let Some(segment) = parts.next() {
                    if segment == "super" {
                        deg += 1;
                    } else {
                        first_segment = Some(segment);
                        break;
                    }
                }
                PathKind::Super(deg)
            }
            segment => {
                first_segment = Some(segment);
                PathKind::Plain
            }
        };
        let parts = first_segment.into_iter().chain(parts).map(|segment| match segment.parse() {
            Ok(idx) => Name::new_tuple_field(idx),
            Err(_) => Name::new_text_dont_use(segment.into()),
        });
        Some(ModPath::from_segments(kind, parts))
    };

    let full = try_get_modpath(link);
    if full.is_some() {
        return full;
    }

    // Tuple field names cannot be a part of `ModPath` usually, but rustdoc can
    // resolve doc paths like `TupleStruct::0`.
    // FIXME: Find a better way to handle these.
    let (base, maybe_tuple_field) = link.rsplit_once("::")?;
    let tuple_field = Name::new_tuple_field(maybe_tuple_field.parse().ok()?);
    let mut modpath = try_get_modpath(base)?;
    modpath.push_segment(tuple_field);
    Some(modpath)
    try_get_modpath(link)
}

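The new `modpath_from_str` above classifies a doc link by its leading segments instead of parsing it as Rust syntax. A standalone sketch of just that classification step (the `PathKind` and return types here are stand-ins for illustration; the real code builds a `ModPath` with interned `Name`s):

#[derive(Debug, PartialEq)]
enum PathKind {
    Plain,
    Abs,
    Crate,
    Super(u8),
}

// Split on `::`, count leading `super`s, and keep the remaining segments.
fn classify(link: &str) -> Option<(PathKind, Vec<&str>)> {
    let mut parts = link.split("::");
    let mut segments = Vec::new();
    let kind = match parts.next()? {
        "" => PathKind::Abs,
        "crate" => PathKind::Crate,
        "self" => PathKind::Super(0),
        "super" => {
            let mut deg = 1;
            for segment in parts.by_ref() {
                if segment == "super" {
                    deg += 1;
                } else {
                    segments.push(segment);
                    break;
                }
            }
            PathKind::Super(deg)
        }
        segment => {
            segments.push(segment);
            PathKind::Plain
        }
    };
    segments.extend(parts);
    Some((kind, segments))
}

fn main() {
    assert_eq!(classify("super::super::Foo"), Some((PathKind::Super(2), vec!["Foo"])));
    assert_eq!(classify("crate::foo::bar").unwrap().0, PathKind::Crate);
}
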
@@ -24,12 +24,12 @@
mod semantics;
mod source_analyzer;

mod from_id;
mod attrs;
mod from_id;
mod has_source;

pub mod diagnostics;
pub mod db;
pub mod diagnostics;
pub mod symbols;

mod display;
@@ -70,13 +70,12 @@ use hir_ty::{
    primitive::UintTy,
    traits::FnTrait,
    AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
    GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
    TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, ValueTyDefId,
    WhereClause,
    GenericArgData, InferenceDiagnostic, Interner, ParamKind, QuantifiedWhereClause, Scalar,
    Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind,
    ValueTyDefId, WhereClause,
};
use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
use once_cell::unsync::Lazy;
use rustc_hash::FxHashSet;
use stdx::{impl_from, never};
use syntax::{
@@ -1592,53 +1591,46 @@ impl DefWithBody {
        }

        for diag in source_map.diagnostics() {
            match diag {
                BodyDiagnostic::InactiveCode { node, cfg, opts } => acc.push(
                    InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into(),
                ),
                BodyDiagnostic::MacroError { node, message } => acc.push(
                    MacroError {
                        node: (*node).map(|it| it.into()),
                        precise_location: None,
                        message: message.to_string(),
                    }
                    .into(),
                ),
                BodyDiagnostic::UnresolvedProcMacro { node, krate } => acc.push(
                    UnresolvedProcMacro {
                        node: (*node).map(|it| it.into()),
                        precise_location: None,
                        macro_name: None,
                        kind: MacroKind::ProcMacro,
                        krate: *krate,
                    }
                    .into(),
                ),
                BodyDiagnostic::UnresolvedMacroCall { node, path } => acc.push(
                    UnresolvedMacroCall {
                        macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
                        precise_location: None,
                        path: path.clone(),
                        is_bang: true,
                    }
                    .into(),
                ),
            acc.push(match diag {
                BodyDiagnostic::InactiveCode { node, cfg, opts } => {
                    InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
                }
                BodyDiagnostic::MacroError { node, message } => MacroError {
                    node: (*node).map(|it| it.into()),
                    precise_location: None,
                    message: message.to_string(),
                }
                .into(),
                BodyDiagnostic::UnresolvedProcMacro { node, krate } => UnresolvedProcMacro {
                    node: (*node).map(|it| it.into()),
                    precise_location: None,
                    macro_name: None,
                    kind: MacroKind::ProcMacro,
                    krate: *krate,
                }
                .into(),
                BodyDiagnostic::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
                    macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
                    precise_location: None,
                    path: path.clone(),
                    is_bang: true,
                }
                .into(),
                BodyDiagnostic::UnreachableLabel { node, name } => {
                    acc.push(UnreachableLabel { node: *node, name: name.clone() }.into())
                    UnreachableLabel { node: *node, name: name.clone() }.into()
                }
                BodyDiagnostic::UndeclaredLabel { node, name } => {
                    acc.push(UndeclaredLabel { node: *node, name: name.clone() }.into())
                    UndeclaredLabel { node: *node, name: name.clone() }.into()
                }
            }
            });
        }

        let infer = db.infer(self.into());
        let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
        let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
        let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic");
        for d in &infer.diagnostics {
            match d {
                &hir_ty::InferenceDiagnostic::NoSuchField { field: expr, private } => {
            acc.push(match d {
                &InferenceDiagnostic::NoSuchField { field: expr, private } => {
                    let expr_or_pat = match expr {
                        ExprOrPatId::ExprId(expr) => {
                            source_map.field_syntax(expr).map(AstPtr::wrap_left)
@@ -1647,57 +1639,48 @@ impl DefWithBody {
                            source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
                        }
                    };
                    acc.push(NoSuchField { field: expr_or_pat, private }.into())
                    NoSuchField { field: expr_or_pat, private }.into()
                }
                &hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
                    acc.push(
                        MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }
                            .into(),
                    )
                &InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
                    MismatchedArgCount { call_expr: expr_syntax(call_expr), expected, found }.into()
                }
                &hir_ty::InferenceDiagnostic::PrivateField { expr, field } => {
                &InferenceDiagnostic::PrivateField { expr, field } => {
                    let expr = expr_syntax(expr);
                    let field = field.into();
                    acc.push(PrivateField { expr, field }.into())
                    PrivateField { expr, field }.into()
                }
                &hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
                &InferenceDiagnostic::PrivateAssocItem { id, item } => {
                    let expr_or_pat = match id {
                        ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
                        ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
                    };
                    let item = item.into();
                    acc.push(PrivateAssocItem { expr_or_pat, item }.into())
                    PrivateAssocItem { expr_or_pat, item }.into()
                }
                hir_ty::InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
                InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
                    let call_expr = expr_syntax(*call_expr);

                    acc.push(
                        ExpectedFunction {
                            call: call_expr,
                            found: Type::new(db, DefWithBodyId::from(self), found.clone()),
                        }
                        .into(),
                    )
                    ExpectedFunction {
                        call: call_expr,
                        found: Type::new(db, DefWithBodyId::from(self), found.clone()),
                    }
                    .into()
                }
                hir_ty::InferenceDiagnostic::UnresolvedField {
                InferenceDiagnostic::UnresolvedField {
                    expr,
                    receiver,
                    name,
                    method_with_same_name_exists,
                } => {
                    let expr = expr_syntax(*expr);

                    acc.push(
                        UnresolvedField {
                            expr,
                            name: name.clone(),
                            receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
                            method_with_same_name_exists: *method_with_same_name_exists,
                        }
                        .into(),
                    )
                    UnresolvedField {
                        expr,
                        name: name.clone(),
                        receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
                        method_with_same_name_exists: *method_with_same_name_exists,
                    }
                    .into()
                }
                hir_ty::InferenceDiagnostic::UnresolvedMethodCall {
                InferenceDiagnostic::UnresolvedMethodCall {
                    expr,
                    receiver,
                    name,
@@ -1705,50 +1688,38 @@ impl DefWithBody {
                    assoc_func_with_same_name,
                } => {
                    let expr = expr_syntax(*expr);

                    acc.push(
                        UnresolvedMethodCall {
                            expr,
                            name: name.clone(),
                            receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
                            field_with_same_name: field_with_same_name
                                .clone()
                                .map(|ty| Type::new(db, DefWithBodyId::from(self), ty)),
                            assoc_func_with_same_name: *assoc_func_with_same_name,
                        }
                        .into(),
                    )
                    UnresolvedMethodCall {
                        expr,
                        name: name.clone(),
                        receiver: Type::new(db, DefWithBodyId::from(self), receiver.clone()),
                        field_with_same_name: field_with_same_name
                            .clone()
                            .map(|ty| Type::new(db, DefWithBodyId::from(self), ty)),
                        assoc_func_with_same_name: *assoc_func_with_same_name,
                    }
                    .into()
                }
                &hir_ty::InferenceDiagnostic::UnresolvedAssocItem { id } => {
                &InferenceDiagnostic::UnresolvedAssocItem { id } => {
                    let expr_or_pat = match id {
                        ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
                        ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
                    };
                    acc.push(UnresolvedAssocItem { expr_or_pat }.into())
                    UnresolvedAssocItem { expr_or_pat }.into()
                }
                &hir_ty::InferenceDiagnostic::BreakOutsideOfLoop {
                    expr,
                    is_break,
                    bad_value_break,
                } => {
                &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => {
                    let expr = expr_syntax(expr);
                    acc.push(BreakOutsideOfLoop { expr, is_break, bad_value_break }.into())
                    BreakOutsideOfLoop { expr, is_break, bad_value_break }.into()
                }
                hir_ty::InferenceDiagnostic::TypedHole { expr, expected } => {
                InferenceDiagnostic::TypedHole { expr, expected } => {
                    let expr = expr_syntax(*expr);
                    acc.push(
                        TypedHole {
                            expr,
                            expected: Type::new(db, DefWithBodyId::from(self), expected.clone()),
                        }
                        .into(),
                    )

                    TypedHole {
                        expr,
                        expected: Type::new(db, DefWithBodyId::from(self), expected.clone()),
                    }
                    .into()
                }
                &hir_ty::InferenceDiagnostic::MismatchedTupleStructPatArgCount {
                    pat,
                    expected,
                    found,
                } => {
                &InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => {
                    let expr_or_pat = match pat {
                        ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
                        ExprOrPatId::PatId(pat) => {
@@ -1762,11 +1733,9 @@ impl DefWithBody {
                            InFile { file_id, value: ptr }
                        }
                    };
                    acc.push(
                        MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into(),
                    )
                    MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into()
                }
            }
            });
        }
        for (pat_or_expr, mismatch) in infer.type_mismatches() {
            let expr_or_pat = match pat_or_expr {
@@ -1805,8 +1774,6 @@ impl DefWithBody {
            }
        }

        let hir_body = db.body(self.into());

        if let Ok(borrowck_results) = db.borrowck(self.into()) {
            for borrowck_result in borrowck_results.iter() {
                let mir_body = &borrowck_result.mir_body;
@@ -1828,7 +1795,7 @@ impl DefWithBody {
                    )
                }
                let mol = &borrowck_result.mutability_of_locals;
                for (binding_id, binding_data) in hir_body.bindings.iter() {
                for (binding_id, binding_data) in body.bindings.iter() {
                    if binding_data.problems.is_some() {
                        // We should report specific diagnostics for these problems, not `need-mut` and `unused-mut`.
                        continue;

@@ -116,11 +116,9 @@ trait AddRewrite {
        new: Vec<T>,
        target: TextRange,
    ) -> Option<()>;
    fn yeet() {}
}

impl AddRewrite for Assists {
    fn yeet() {}
    fn add_rewrite<T: AstNode>(
        &mut self,
        label: &str,

@@ -111,6 +111,8 @@ mod handlers {
    mod add_label_to_loop;
    mod add_lifetime_to_type;
    mod add_missing_impl_members;
    mod add_missing_match_arms;
    mod add_return_type;
    mod add_turbo_fish;
    mod apply_demorgan;
    mod auto_import;
@@ -124,15 +126,15 @@ mod handlers {
    mod convert_iter_for_each_to_for;
    mod convert_let_else_to_match;
    mod convert_match_to_let_else;
    mod convert_named_struct_to_tuple_struct;
    mod convert_nested_function_to_closure;
    mod convert_to_guarded_return;
    mod convert_tuple_return_type_to_struct;
    mod convert_tuple_struct_to_named_struct;
    mod convert_named_struct_to_tuple_struct;
    mod convert_to_guarded_return;
    mod convert_two_arm_bool_match_to_matches_macro;
    mod convert_while_to_loop;
    mod desugar_doc_comment;
    mod destructure_tuple_binding;
    mod desugar_doc_comment;
    mod expand_glob_import;
    mod extract_expressions_from_format_string;
    mod extract_function;
@@ -140,7 +142,6 @@ mod handlers {
    mod extract_struct_from_enum_variant;
    mod extract_type_alias;
    mod extract_variable;
    mod add_missing_match_arms;
    mod fix_visibility;
    mod flip_binexpr;
    mod flip_comma;
@@ -148,6 +149,7 @@ mod handlers {
    mod generate_constant;
    mod generate_default_from_enum_variant;
    mod generate_default_from_new;
    mod generate_delegate_methods;
    mod generate_delegate_trait;
    mod generate_deref;
    mod generate_derive;
@@ -162,62 +164,60 @@ mod handlers {
    mod generate_is_empty_from_len;
    mod generate_mut_trait_impl;
    mod generate_new;
    mod generate_delegate_methods;
    mod generate_trait_from_impl;
    mod add_return_type;
    mod inline_call;
    mod inline_const_as_literal;
    mod inline_local_variable;
    mod inline_macro;
    mod inline_type_alias;
    mod into_to_qualified_from;
    mod introduce_named_generic;
    mod introduce_named_lifetime;
    mod invert_if;
    mod merge_imports;
    mod merge_match_arms;
    mod merge_nested_if;
    mod move_bounds;
    mod move_const_to_impl;
    mod move_from_mod_rs;
    mod move_guard;
    mod move_module_to_file;
    mod move_to_mod_rs;
    mod move_from_mod_rs;
    mod number_representation;
    mod promote_local_to_const;
    mod pull_assignment_up;
    mod qualify_path;
    mod qualify_method_call;
    mod qualify_path;
    mod raw_string;
    mod remove_dbg;
    mod remove_mut;
    mod remove_parentheses;
    mod remove_unused_imports;
    mod remove_unused_param;
    mod remove_parentheses;
    mod reorder_fields;
    mod reorder_impl_items;
    mod replace_try_expr_with_match;
    mod replace_arith_op;
    mod replace_derive_with_manual_impl;
    mod replace_if_let_with_match;
    mod replace_is_method_with_if_let_method;
    mod replace_method_eager_lazy;
    mod replace_arith_op;
    mod introduce_named_generic;
    mod replace_let_with_if_let;
    mod replace_method_eager_lazy;
    mod replace_named_generic_with_impl;
    mod replace_qualified_name_with_use;
    mod replace_string_with_char;
    mod replace_try_expr_with_match;
    mod replace_turbofish_with_explicit_type;
    mod split_import;
    mod unmerge_match_arm;
    mod unwrap_tuple;
    mod sort_items;
    mod split_import;
    mod toggle_ignore;
    mod unmerge_match_arm;
    mod unmerge_use;
    mod unnecessary_async;
    mod unqualify_method_call;
    mod unwrap_block;
    mod unwrap_result_return_type;
    mod unqualify_method_call;
    mod unwrap_tuple;
    mod wrap_return_type_in_result;
    mod into_to_qualified_from;
    mod merge_nested_if;

    pub(crate) fn all() -> &'static [Handler] {
        &[

@@ -23,8 +23,8 @@ use syntax::{

use crate::assist_context::{AssistContext, SourceChangeBuilder};

pub(crate) mod suggest_name;
mod gen_trait_fn_body;
pub(crate) mod suggest_name;

pub(crate) fn unwrap_trivial_block(block_expr: ast::BlockExpr) -> ast::Expr {
    extract_trivial_expression(&block_expr)

@@ -2,8 +2,10 @@

pub(crate) mod attribute;
pub(crate) mod dot;
pub(crate) mod env_vars;
pub(crate) mod expr;
pub(crate) mod extern_abi;
pub(crate) mod extern_crate;
pub(crate) mod field;
pub(crate) mod flyimport;
pub(crate) mod fn_param;
@@ -19,8 +21,6 @@ pub(crate) mod snippet;
pub(crate) mod r#type;
pub(crate) mod use_;
pub(crate) mod vis;
pub(crate) mod env_vars;
pub(crate) mod extern_crate;

use std::iter;

@@ -25,8 +25,8 @@ use crate::{
mod cfg;
mod derive;
mod lint;
mod repr;
mod macro_use;
mod repr;

pub(crate) use self::derive::complete_derive_path;

@@ -8,9 +8,9 @@ mod context;
mod item;
mod render;

mod snippet;
#[cfg(test)]
mod tests;
mod snippet;

use ide_db::{
    base_db::FilePosition,

@@ -1,14 +1,14 @@
//! `render` module provides utilities for rendering completion suggestions
//! into code pieces that will be presented to user.

pub(crate) mod macro_;
pub(crate) mod function;
pub(crate) mod const_;
pub(crate) mod function;
pub(crate) mod literal;
pub(crate) mod macro_;
pub(crate) mod pattern;
pub(crate) mod type_alias;
pub(crate) mod variant;
pub(crate) mod union_literal;
pub(crate) mod literal;
pub(crate) mod variant;

use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
use ide_db::{

@@ -12,8 +12,8 @@ mod attribute;
mod expression;
mod flyimport;
mod fn_param;
mod item_list;
mod item;
mod item_list;
mod pattern;
mod predicate;
mod proc_macros;

@@ -9,6 +9,7 @@ mod apply_change;
pub mod active_parameter;
pub mod assists;
pub mod defs;
pub mod documentation;
pub mod famous_defs;
pub mod helpers;
pub mod items_locator;
@@ -22,7 +23,6 @@ pub mod symbol_index;
pub mod traits;
pub mod ty_filter;
pub mod use_trivial_constructor;
pub mod documentation;

pub mod imports {
    pub mod import_assets;
@@ -35,10 +35,10 @@ pub mod generated {
}

pub mod syntax_helpers {
    pub mod node_ext;
    pub mod insert_whitespace_into_node;
    pub mod format_string;
    pub mod format_string_exprs;
    pub mod insert_whitespace_into_node;
    pub mod node_ext;

    pub use parser::LexedStr;
}
@@ -414,6 +414,6 @@ impl SnippetCap {

#[cfg(test)]
mod tests {
    mod sourcegen_lints;
    mod line_index;
    mod sourcegen_lints;
}

@@ -3,10 +3,10 @@ use hir::InFile;
use ide_db::base_db::FileRange;
use syntax::{
    ast::{self, HasArgList},
    AstNode, SyntaxNodePtr,
    AstNode, AstPtr,
};

use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext};

// Diagnostic: mismatched-tuple-struct-pat-arg-count
//
@@ -24,7 +24,7 @@ pub(crate) fn mismatched_tuple_struct_pat_arg_count(
    Diagnostic::new(
        DiagnosticCode::RustcHardError("E0023"),
        message,
        invalid_args_range(ctx, d.expr_or_pat.map(Into::into), d.expected, d.found),
        invalid_args_range(ctx, d.expr_or_pat, d.expected, d.found),
    )
}

@@ -40,17 +40,17 @@ pub(crate) fn mismatched_arg_count(
    Diagnostic::new(
        DiagnosticCode::RustcHardError("E0107"),
        message,
        invalid_args_range(ctx, d.call_expr.map(Into::into), d.expected, d.found),
        invalid_args_range(ctx, d.call_expr.map(AstPtr::wrap_left), d.expected, d.found),
    )
}

fn invalid_args_range(
    ctx: &DiagnosticsContext<'_>,
    source: InFile<SyntaxNodePtr>,
    source: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
    expected: usize,
    found: usize,
) -> FileRange {
    adjusted_display_range::<Either<ast::Expr, ast::TupleStructPat>>(ctx, source, &|expr| {
    adjusted_display_range_new(ctx, source, &|expr| {
        let (text_range, r_paren_token, expected_arg) = match expr {
            Either::Left(ast::Expr::CallExpr(call)) => {
                let arg_list = call.arg_list()?;
@@ -68,7 +68,7 @@ fn invalid_args_range(
                arg_list.args().nth(expected).map(|it| it.syntax().text_range()),
            )
        }
        Either::Right(pat) => {
        Either::Right(ast::Pat::TupleStructPat(pat)) => {
            let r_paren = pat.r_paren_token()?;
            let l_paren = pat.l_paren_token()?;
            (

@@ -128,6 +128,36 @@ fn missing_record_expr_field_fixes(
mod tests {
    use crate::tests::{check_diagnostics, check_fix, check_no_fix};

    #[test]
    fn dont_work_for_field_with_disabled_cfg() {
        check_diagnostics(
            r#"
struct Test {
    #[cfg(feature = "hello")]
    test: u32,
    other: u32
}

fn main() {
    let a = Test {
        #[cfg(feature = "hello")]
        test: 1,
        other: 1
    };

    let Test {
        #[cfg(feature = "hello")]
        test,
        mut other,
        ..
    } = a;

    other += 1;
}
"#,
        );
    }

    #[test]
    fn no_such_field_diagnostics() {
        check_diagnostics(

@@ -44,30 +44,30 @@ mod handlers {
    pub(crate) mod private_assoc_item;
    pub(crate) mod private_field;
    pub(crate) mod replace_filter_map_next_with_find_map;
    pub(crate) mod trait_impl_orphan;
    pub(crate) mod trait_impl_incorrect_safety;
    pub(crate) mod trait_impl_missing_assoc_item;
    pub(crate) mod trait_impl_orphan;
    pub(crate) mod trait_impl_redundant_assoc_item;
    pub(crate) mod typed_hole;
    pub(crate) mod type_mismatch;
    pub(crate) mod typed_hole;
    pub(crate) mod undeclared_label;
    pub(crate) mod unimplemented_builtin_macro;
    pub(crate) mod unreachable_label;
    pub(crate) mod unresolved_assoc_item;
    pub(crate) mod unresolved_extern_crate;
    pub(crate) mod unresolved_field;
    pub(crate) mod unresolved_method;
    pub(crate) mod unresolved_import;
    pub(crate) mod unresolved_macro_call;
    pub(crate) mod unresolved_method;
    pub(crate) mod unresolved_module;
    pub(crate) mod unresolved_proc_macro;
    pub(crate) mod undeclared_label;
    pub(crate) mod unreachable_label;
    pub(crate) mod unused_variables;

    // The handlers below are unusual: they implement the diagnostics as well.
    pub(crate) mod field_shorthand;
    pub(crate) mod useless_braces;
    pub(crate) mod unlinked_file;
    pub(crate) mod json_is_not_rust;
    pub(crate) mod unlinked_file;
    pub(crate) mod useless_braces;
}

#[cfg(test)]

@@ -69,11 +69,11 @@
// // foo($a, $b) ==>> ($a).foo($b)
// ```

mod fragments;
mod from_comment;
mod matching;
mod nester;
mod parsing;
mod fragments;
mod replacing;
mod resolving;
mod search;

@@ -25,13 +25,13 @@ mod bind_pat;
mod binding_mode;
mod chaining;
mod closing_brace;
mod closure_ret;
mod closure_captures;
mod closure_ret;
mod discriminant;
mod fn_lifetime_fn;
mod implicit_drop;
mod implicit_static;
mod param_name;
mod implicit_drop;
mod range_exclusive;

#[derive(Clone, Debug, PartialEq, Eq)]

@@ -21,24 +21,25 @@ macro_rules! eprintln {
mod fixture;

mod markup;
mod prime_caches;
mod navigation_target;
mod prime_caches;

mod annotations;
mod call_hierarchy;
mod signature_help;
mod doc_links;
mod highlight_related;
mod expand_macro;
mod extend_selection;
mod fetch_crates;
mod file_structure;
mod folding_ranges;
mod goto_declaration;
mod goto_definition;
mod goto_implementation;
mod goto_type_definition;
mod highlight_related;
mod hover;
mod inlay_hints;
mod interpret_function;
mod join_lines;
mod markdown_remove;
mod matching_brace;
@@ -48,6 +49,8 @@ mod parent_module;
mod references;
mod rename;
mod runnables;
mod shuffle_crate_graph;
mod signature_help;
mod ssr;
mod static_index;
mod status;
@@ -56,12 +59,9 @@ mod syntax_tree;
mod typing;
mod view_crate_graph;
mod view_hir;
mod view_mir;
mod interpret_function;
mod view_item_tree;
mod shuffle_crate_graph;
mod fetch_crates;
mod view_memory_layout;
mod view_mir;

use std::ffi::OsStr;

@@ -79,7 +79,7 @@ use syntax::SourceFile;
use triomphe::Arc;
use view_memory_layout::{view_memory_layout, RecursiveMemoryLayout};

use crate::navigation_target::{ToNav, TryToNav};
use crate::navigation_target::ToNav;

pub use crate::{
    annotations::{Annotation, AnnotationConfig, AnnotationKind, AnnotationLocation},
@@ -104,7 +104,7 @@ pub use crate::{
        SymbolInformationKind,
    },
    move_item::Direction,
    navigation_target::{NavigationTarget, UpmappingResult},
    navigation_target::{NavigationTarget, TryToNav, UpmappingResult},
    prime_caches::ParallelPrimeCachesProgress,
    references::ReferenceSearchResult,
    rename::RenameError,

@@ -76,7 +76,7 @@ pub(crate) trait ToNav {
    fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget>;
}

pub(crate) trait TryToNav {
pub trait TryToNav {
    fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>>;
}

@@ -3,11 +3,11 @@ pub(crate) mod tags;
mod highlights;
mod injector;

mod highlight;
mod format;
mod macro_;
mod inject;
mod escape;
mod format;
mod highlight;
mod inject;
mod macro_;

mod html;
#[cfg(test)]

@@ -8,11 +8,11 @@

#![warn(rust_2018_idioms, unused_lifetimes)]

mod parser;
mod expander;
mod parser;
mod syntax_bridge;
mod tt_iter;
mod to_parser_input;
mod tt_iter;

#[cfg(test)]
mod benchmark;

@@ -234,7 +234,7 @@ where
    let mut stack = NonEmptyVec::new(entry);

    while let Some((token, abs_range)) = conv.bump() {
        let tt::Subtree { delimiter, token_trees: result } = stack.last_mut();
        let tt::Subtree { delimiter, token_trees } = stack.last_mut();

        let tt = match token.as_leaf() {
            Some(leaf) => tt::TokenTree::Leaf(leaf.clone()),
@@ -243,7 +243,7 @@ where
                COMMENT => {
                    let span = conv.span_for(abs_range);
                    if let Some(tokens) = conv.convert_doc_comment(&token, span) {
                        result.extend(tokens);
                        token_trees.extend(tokens);
                    }
                    continue;
                }
@@ -317,7 +317,7 @@ where
                        span: conv
                            .span_for(TextRange::at(abs_range.start(), TextSize::of('\''))),
                    });
                    result.push(apostrophe.into());
                    token_trees.push(apostrophe.into());

                    let ident = tt::Leaf::from(tt::Ident {
                        text: SmolStr::new(&token.to_text(conv)[1..]),
@@ -326,7 +326,7 @@ where
                            abs_range.end(),
                        )),
                    });
                    result.push(ident.into());
                    token_trees.push(ident.into());
                    continue;
                }
                _ => continue,
@@ -337,7 +337,7 @@ where
            },
        };

        result.push(tt);
        token_trees.push(tt);
    }

    // If we get here, we've consumed all input tokens.

@@ -30,12 +30,12 @@

mod attributes;
mod expressions;
mod generic_args;
mod generic_params;
mod items;
mod params;
mod paths;
mod patterns;
mod generic_args;
mod generic_params;
mod types;

use crate::{

@@ -1,5 +1,5 @@
mod consts;
mod adt;
mod consts;
mod traits;
mod use_item;

@@ -26,15 +26,15 @@ extern crate ra_ap_rustc_lexer as rustc_lexer;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_lexer;

mod lexed_str;
mod token_set;
mod syntax_kind;
mod event;
mod parser;
mod grammar;
mod input;
mod lexed_str;
mod output;
mod parser;
mod shortcuts;
mod syntax_kind;
mod token_set;

#[cfg(test)]
mod tests;

@@ -1,6 +1,6 @@
mod prefix_entries;
mod sourcegen_inline_tests;
mod top_entries;
mod prefix_entries;

use std::{
    fmt::Write,

@@ -100,7 +100,8 @@ fn main() {
            .current_dir(&staging_dir)
            .args(["pkgid", name])
            .output()
            .unwrap().stdout,
            .unwrap()
            .stdout,
    )
    .unwrap();
    let pkgid = pkgid.trim();
@@ -109,7 +110,8 @@ fn main() {
    for message in Message::parse_stream(output.stdout.as_slice()) {
        if let Message::CompilerArtifact(artifact) = message.unwrap() {
            if artifact.target.kind.contains(&"proc-macro".to_string()) {
                if artifact.package_id.repr.starts_with(&repr) || artifact.package_id.repr == pkgid {
                if artifact.package_id.repr.starts_with(&repr) || artifact.package_id.repr == pkgid
                {
                    artifact_path = Some(PathBuf::from(&artifact.filenames[0]));
                }
            }

@@ -21,8 +21,8 @@ extern crate proc_macro;
extern crate rustc_driver as _;

mod dylib;
mod server;
mod proc_macros;
mod server;

use std::{
    collections::{hash_map::Entry, HashMap},

@@ -13,9 +13,9 @@ use proc_macro::bridge;
mod token_stream;
pub use token_stream::TokenStream;

pub mod token_id;
pub mod rust_analyzer_span;
mod symbol;
pub mod token_id;
pub use symbol::*;
use tt::Spacing;

@@ -206,7 +206,7 @@ impl server::TokenStream for TokenIdServer {
            stream: if subtree.token_trees.is_empty() {
                None
            } else {
                Some(subtree.token_trees.into_iter().collect())
                Some(TokenStream { token_trees: subtree.token_trees })
            },
            span: bridge::DelimSpan::from_single(subtree.delimiter.open),
        }),

@@ -2,11 +2,11 @@

#![warn(rust_2018_idioms, unused_lifetimes)]

mod stop_watch;
mod memory_usage;
#[cfg(feature = "cpu_profiler")]
mod google_cpu_profiler;
mod hprof;
mod memory_usage;
mod stop_watch;
mod tree;

use std::cell::RefCell;

@@ -17,15 +17,15 @@

#![warn(rust_2018_idioms, unused_lifetimes)]

mod manifest_path;
mod build_scripts;
mod cargo_workspace;
mod cfg_flag;
mod manifest_path;
mod project_json;
mod sysroot;
mod workspace;
mod rustc_cfg;
mod build_scripts;
mod sysroot;
pub mod target_data_layout;
mod workspace;

#[cfg(test)]
mod tests;

@@ -1,16 +1,16 @@
//! Various batch processing tasks, intended primarily for debugging.

pub mod flags;
mod parse;
mod symbols;
mod highlight;
mod analysis_stats;
mod diagnostics;
mod ssr;
pub mod flags;
mod highlight;
mod lsif;
mod scip;
mod parse;
mod run_tests;
mod rustc_tests;
mod scip;
mod ssr;
mod symbols;

mod progress_report;

@@ -1319,6 +1319,9 @@ pub(crate) fn handle_code_lens_resolve(
    snap: GlobalStateSnapshot,
    code_lens: CodeLens,
) -> anyhow::Result<CodeLens> {
    if code_lens.data.is_none() {
        return Ok(code_lens);
    }
    let Some(annotation) = from_proto::annotation(&snap, code_lens.clone())? else {
        return Ok(code_lens);
    };
@@ -1327,13 +1330,14 @@ pub(crate) fn handle_code_lens_resolve(
    let mut acc = Vec::new();
    to_proto::code_lens(&mut acc, &snap, annotation)?;

    let res = match acc.pop() {
    let mut res = match acc.pop() {
        Some(it) if acc.is_empty() => it,
        _ => {
            never!();
            code_lens
        }
    };
    res.data = None;

    Ok(res)
}

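The `handle_code_lens_resolve` change above short-circuits when the incoming lens carries no `data` (there is nothing left to resolve) and clears `data` on the resolved lens before returning it, presumably so clients do not ask the server to resolve the same lens again.
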
@@ -2,11 +2,11 @@

use core::fmt;

pub(crate) mod utils;
pub(crate) mod semantic_tokens;
pub mod ext;
pub(crate) mod from_proto;
pub(crate) mod semantic_tokens;
pub(crate) mod to_proto;
pub(crate) mod utils;

#[derive(Debug)]
pub(crate) struct LspError {

@@ -154,6 +154,7 @@ fn check_licenses() {
Apache-2.0
Apache-2.0 OR BSL-1.0
Apache-2.0 OR MIT
Apache-2.0 WITH LLVM-exception
Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-3-Clause

@@ -1,7 +1,4 @@
//! File and span related types.
// FIXME: This should be moved into its own crate to get rid of the dependency inversion, base-db
// has business depending on tt, tt should depend on a span crate only (which unfortunately will have
// to depend on salsa)
use std::fmt::{self, Write};

use salsa::InternId;

@@ -6,13 +6,13 @@ use std::io as sio;
use std::process::Command;
use std::{cmp::Ordering, ops, time::Instant};

pub mod anymap;
mod macros;
pub mod process;
pub mod panic_context;
pub mod non_empty_vec;
pub mod panic_context;
pub mod process;
pub mod rand;
pub mod thread;
pub mod anymap;

pub use always_assert::{always, never};
pub use itertools;

@@ -1,15 +1,15 @@
//! Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s

mod generated;
mod traits;
mod token_ext;
mod node_ext;
mod expr_ext;
mod operators;
pub mod edit;
pub mod edit_in_place;
mod expr_ext;
mod generated;
pub mod make;
mod node_ext;
mod operators;
pub mod prec;
mod token_ext;
mod traits;

use std::marker::PhantomData;

@@ -32,22 +32,22 @@ macro_rules! eprintln {
    ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
}

mod syntax_node;
mod syntax_error;
mod parsing;
mod validation;
mod ptr;
mod token_text;
mod syntax_error;
mod syntax_node;
#[cfg(test)]
mod tests;
mod token_text;
mod validation;

pub mod algo;
pub mod ast;
#[doc(hidden)]
pub mod fuzz;
pub mod utils;
pub mod ted;
pub mod hacks;
pub mod ted;
pub mod utils;

use std::marker::PhantomData;

@@ -6,7 +6,9 @@
    "brackets": [
        ["{", "}"],
        ["[", "]"],
        ["(", ")"]
        ["(", ")"],
        ["#[", "]"],
        ["#![", "]"]
    ],
    "colorizedBracketPairs": [
        ["{", "}"],
@@ -17,6 +19,8 @@
        { "open": "{", "close": "}" },
        { "open": "[", "close": "]" },
        { "open": "(", "close": ")" },
        { "open": "#[", "close": "]" },
        { "open": "#![", "close": "]" },
        { "open": "\"", "close": "\"", "notIn": ["string"] },
        { "open": "/*", "close": " */" },
        { "open": "`", "close": "`", "notIn": ["string"] }

@@ -6,11 +6,11 @@

#![warn(rust_2018_idioms, unused_lifetimes)]

mod msg;
mod stdio;
mod error;
mod socket;
mod msg;
mod req_queue;
mod socket;
mod stdio;

use std::{
    io,

@@ -1,2 +1,2 @@
reorder_modules = false
reorder_modules = true
use_small_heuristics = "Max"

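Flipping `reorder_modules` to `true` in rustfmt.toml is what drives most of the churn in this commit: rustfmt now sorts `mod` declarations, which is why the module lists across the crates above appear both in their old order and alphabetized.
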
@@ -12,11 +12,11 @@

mod flags;

mod install;
mod release;
mod dist;
mod publish;
mod install;
mod metrics;
mod publish;
mod release;

use anyhow::bail;
use std::{