Merge commit '4f55ebbd4' into HEAD

Amos Wenger 2022-11-25 13:03:10 +01:00
commit e6540cff74
125 changed files with 2895 additions and 1103 deletions

Cargo.lock generated (30 changed lines)
View file

@ -863,7 +863,7 @@ dependencies = [
[[package]]
name = "lsp-server"
version = "0.6.0"
version = "0.7.0"
dependencies = [
"crossbeam-channel",
"log",
@ -1190,9 +1190,9 @@ version = "0.0.0"
[[package]]
name = "proc-macro2"
version = "1.0.43"
version = "1.0.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"
checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
dependencies = [
"unicode-ident",
]
@ -1502,18 +1502,18 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.143"
version = "1.0.144"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53e8e5d5b70924f74ff5c6d64d9a5acd91422117c60f48c4e07855238a254553"
checksum = "0f747710de3dcd43b88c9168773254e809d8ddbdf9653b84e2554ab219f17860"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.143"
version = "1.0.144"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3d8e8de557aee63c26b85b947f5e59b690d0454c753f3adeb5cd7835ab88391"
checksum = "94ed3a816fb1d101812f83e789f888322c34e291f894f19590dc310963e87a00"
dependencies = [
"proc-macro2",
"quote",
@ -1522,9 +1522,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.83"
version = "1.0.85"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38dd04e3c8279e75b31ef29dbdceebfe5ad89f4d0937213c53f7d49d01b3d5a7"
checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44"
dependencies = [
"indexmap",
"itoa",
@ -1593,9 +1593,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.99"
version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13"
checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"
dependencies = [
"proc-macro2",
"quote",
@ -2082,18 +2082,18 @@ checksum = "06069a848f95fceae3e5e03c0ddc8cb78452b56654ee0c8e68f938cf790fb9e3"
[[package]]
name = "xflags"
version = "0.2.4"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f14fe1ed41a5a2b5ef3f565586c4a8a559ee55d3953faab360a771135bdee00"
checksum = "cbf19f5031a1a812e96fede16f8161218883079946cea87619d3613db1efd268"
dependencies = [
"xflags-macros",
]
[[package]]
name = "xflags-macros"
version = "0.2.4"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45d11d5fc2a97287eded8b170ca80533b3c42646dd7fa386a5eb045817921022"
checksum = "2afbd7f2039bb6cad2dd45f0c5dff49c0d4e26118398768b7a605524d4251809"
[[package]]
name = "xshell"

View file

@ -196,7 +196,7 @@ impl ChangeFixture {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
} else {
for (from, to, prelude) in crate_deps {
@ -270,7 +270,7 @@ impl ChangeFixture {
Env::default(),
Ok(proc_macro),
true,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
for krate in all_crates {
@ -398,7 +398,7 @@ fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option<String>) {
let (version, origin) = match b.split_once(':') {
Some(("CratesIo", data)) => match data.split_once(',') {
Some((version, url)) => {
(version, CrateOrigin::CratesIo { repo: Some(url.to_owned()) })
(version, CrateOrigin::CratesIo { repo: Some(url.to_owned()), name: None })
}
_ => panic!("Bad crates.io parameter: {}", data),
},
@ -409,7 +409,7 @@ fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option<String>) {
let crate_origin = match &*crate_str {
"std" => CrateOrigin::Lang(LangCrateOrigin::Std),
"core" => CrateOrigin::Lang(LangCrateOrigin::Core),
_ => CrateOrigin::CratesIo { repo: None },
_ => CrateOrigin::CratesIo { repo: None, name: None },
};
(crate_str, crate_origin, None)
}

View file

@ -136,7 +136,7 @@ impl ops::Deref for CrateName {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum CrateOrigin {
/// Crates that are from the crates.io official registry,
CratesIo { repo: Option<String> },
CratesIo { repo: Option<String>, name: Option<String> },
/// Crates that are provided by the language, like std, core, proc-macro, ...
Lang(LangCrateOrigin),
}
@ -648,7 +648,7 @@ mod tests {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
let crate2 = graph.add_crate_root(
FileId(2u32),
@ -660,7 +660,7 @@ mod tests {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
let crate3 = graph.add_crate_root(
FileId(3u32),
@ -672,7 +672,7 @@ mod tests {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
@ -698,7 +698,7 @@ mod tests {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
let crate2 = graph.add_crate_root(
FileId(2u32),
@ -710,7 +710,7 @@ mod tests {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
@ -733,7 +733,7 @@ mod tests {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
let crate2 = graph.add_crate_root(
FileId(2u32),
@ -745,7 +745,7 @@ mod tests {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
let crate3 = graph.add_crate_root(
FileId(3u32),
@ -757,7 +757,7 @@ mod tests {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
assert!(graph
.add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
@ -780,7 +780,7 @@ mod tests {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
let crate2 = graph.add_crate_root(
FileId(2u32),
@ -792,7 +792,7 @@ mod tests {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
assert!(graph
.add_dep(

View file

@ -169,13 +169,17 @@ impl FlycheckActor {
}
fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> {
let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver);
if let Ok(msg) = inbox.try_recv() {
// give restarts a preference so check outputs don't block a restart or stop
return Some(Event::Restart(msg));
}
select! {
recv(inbox) -> msg => msg.ok().map(Event::Restart),
recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
}
}
fn run(mut self, inbox: Receiver<Restart>) {
while let Some(event) = self.next_event(&inbox) {
'event: while let Some(event) = self.next_event(&inbox) {
match event {
Event::Restart(Restart::No) => {
self.cancel_check_process();
@ -183,7 +187,12 @@ impl FlycheckActor {
Event::Restart(Restart::Yes) => {
// Cancel the previously spawned process
self.cancel_check_process();
while let Ok(_) = inbox.recv_timeout(Duration::from_millis(50)) {}
while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) {
// restart chained with a stop, so just cancel
if let Restart::No = restart {
continue 'event;
}
}
let command = self.check_command();
tracing::debug!(?command, "will restart flycheck");
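
The reworked `next_event` gives restarts priority: the restart inbox is polled with a non-blocking `try_recv` before the blocking `select!`, so queued check output can never starve a restart or stop request, and a `Restart::No` arriving during the drain loop cancels instead of restarting. A minimal standalone sketch of the same crossbeam-channel pattern, using illustrative names rather than the flycheck types:

use crossbeam_channel::{never, select, unbounded, Receiver};

enum Event {
    Restart,
    Check(Option<String>),
}

fn next_event(inbox: &Receiver<()>, check: Option<&Receiver<String>>) -> Option<Event> {
    // Non-blocking look at the high-priority channel first.
    if inbox.try_recv().is_ok() {
        return Some(Event::Restart);
    }
    select! {
        recv(inbox) -> msg => msg.ok().map(|_| Event::Restart),
        // `never()` is a channel that never delivers, so a missing check
        // handle simply disables that arm.
        recv(check.unwrap_or(&never())) -> msg => Some(Event::Check(msg.ok())),
    }
}

fn main() {
    let (restart_tx, restart_rx) = unbounded();
    let (check_tx, check_rx) = unbounded();
    check_tx.send("diagnostic".to_owned()).unwrap();
    restart_tx.send(()).unwrap();
    // Check output is already queued, but the restart still wins.
    assert!(matches!(next_event(&restart_rx, Some(&check_rx)), Some(Event::Restart)));
}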

View file

@ -1,12 +1,12 @@
//! Defines hir-level representation of structs, enums and unions
use std::sync::Arc;
use std::{num::NonZeroU32, sync::Arc};
use base_db::CrateId;
use either::Either;
use hir_expand::{
name::{AsName, Name},
InFile,
HirFileId, InFile,
};
use la_arena::{Arena, ArenaMap};
use syntax::ast::{self, HasName, HasVisibility};
@ -14,15 +14,18 @@ use tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree};
use crate::{
body::{CfgExpander, LowerCtx},
builtin_type::{BuiltinInt, BuiltinUint},
db::DefDatabase,
intern::Interned,
item_tree::{AttrOwner, Field, Fields, ItemTree, ModItem, RawVisibilityId},
item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId},
nameres::diagnostics::DefDiagnostic,
src::HasChildSource,
src::HasSource,
trace::Trace,
type_ref::TypeRef,
visibility::RawVisibility,
EnumId, LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StructId, UnionId, VariantId,
EnumId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId, UnionId,
VariantId,
};
use cfg::CfgOptions;
@ -31,7 +34,7 @@ use cfg::CfgOptions;
pub struct StructData {
pub name: Name,
pub variant_data: Arc<VariantData>,
pub repr: Option<ReprKind>,
pub repr: Option<ReprData>,
pub visibility: RawVisibility,
}
@ -39,6 +42,7 @@ pub struct StructData {
pub struct EnumData {
pub name: Name,
pub variants: Arena<EnumVariantData>,
pub repr: Option<ReprData>,
pub visibility: RawVisibility,
}
@ -63,10 +67,19 @@ pub struct FieldData {
pub visibility: RawVisibility,
}
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Copy, Debug, Clone, PartialEq, Eq)]
pub enum ReprKind {
Packed,
Other,
C,
BuiltinInt { builtin: Either<BuiltinInt, BuiltinUint>, is_c: bool },
Transparent,
Default,
}
#[derive(Copy, Debug, Clone, PartialEq, Eq)]
pub struct ReprData {
pub kind: ReprKind,
pub packed: bool,
pub align: Option<NonZeroU32>,
}
fn repr_from_value(
@ -74,25 +87,71 @@ fn repr_from_value(
krate: CrateId,
item_tree: &ItemTree,
of: AttrOwner,
) -> Option<ReprKind> {
) -> Option<ReprData> {
item_tree.attrs(db, krate, of).by_key("repr").tt_values().find_map(parse_repr_tt)
}
fn parse_repr_tt(tt: &Subtree) -> Option<ReprKind> {
fn parse_repr_tt(tt: &Subtree) -> Option<ReprData> {
match tt.delimiter {
Some(Delimiter { kind: DelimiterKind::Parenthesis, .. }) => {}
_ => return None,
}
let mut it = tt.token_trees.iter();
match it.next()? {
TokenTree::Leaf(Leaf::Ident(ident)) if ident.text == "packed" => Some(ReprKind::Packed),
_ => Some(ReprKind::Other),
let mut data = ReprData { kind: ReprKind::Default, packed: false, align: None };
let mut tts = tt.token_trees.iter().peekable();
while let Some(tt) = tts.next() {
if let TokenTree::Leaf(Leaf::Ident(ident)) = tt {
match &*ident.text {
"packed" => {
data.packed = true;
if let Some(TokenTree::Subtree(_)) = tts.peek() {
tts.next();
}
}
"align" => {
if let Some(TokenTree::Subtree(tt)) = tts.peek() {
tts.next();
if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() {
if let Ok(align) = lit.text.parse() {
data.align = Some(align);
}
}
}
}
"C" => {
if let ReprKind::BuiltinInt { is_c, .. } = &mut data.kind {
*is_c = true;
} else {
data.kind = ReprKind::C;
}
}
"transparent" => data.kind = ReprKind::Transparent,
repr => {
let is_c = matches!(data.kind, ReprKind::C);
if let Some(builtin) = BuiltinInt::from_suffix(repr)
.map(Either::Left)
.or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right))
{
data.kind = ReprKind::BuiltinInt { builtin, is_c };
}
}
}
}
}
Some(data)
}
impl StructData {
pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc<StructData> {
db.struct_data_with_diagnostics(id).0
}
pub(crate) fn struct_data_with_diagnostics_query(
db: &dyn DefDatabase,
id: StructId,
) -> (Arc<StructData>, Arc<[DefDiagnostic]>) {
let loc = id.lookup(db);
let krate = loc.container.krate;
let item_tree = loc.id.item_tree(db);
@ -100,15 +159,35 @@ impl StructData {
let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
let strukt = &item_tree[loc.id.value];
let variant_data = lower_fields(db, krate, &item_tree, &cfg_options, &strukt.fields, None);
Arc::new(StructData {
name: strukt.name.clone(),
variant_data: Arc::new(variant_data),
repr,
visibility: item_tree[strukt.visibility].clone(),
})
let (variant_data, diagnostics) = lower_fields(
db,
krate,
loc.id.file_id(),
loc.container.local_id,
&item_tree,
&cfg_options,
&strukt.fields,
None,
);
(
Arc::new(StructData {
name: strukt.name.clone(),
variant_data: Arc::new(variant_data),
repr,
visibility: item_tree[strukt.visibility].clone(),
}),
diagnostics.into(),
)
}
pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc<StructData> {
db.union_data_with_diagnostics(id).0
}
pub(crate) fn union_data_with_diagnostics_query(
db: &dyn DefDatabase,
id: UnionId,
) -> (Arc<StructData>, Arc<[DefDiagnostic]>) {
let loc = id.lookup(db);
let krate = loc.container.krate;
let item_tree = loc.id.item_tree(db);
@ -116,56 +195,98 @@ impl StructData {
let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
let union = &item_tree[loc.id.value];
let variant_data = lower_fields(db, krate, &item_tree, &cfg_options, &union.fields, None);
Arc::new(StructData {
name: union.name.clone(),
variant_data: Arc::new(variant_data),
repr,
visibility: item_tree[union.visibility].clone(),
})
let (variant_data, diagnostics) = lower_fields(
db,
krate,
loc.id.file_id(),
loc.container.local_id,
&item_tree,
&cfg_options,
&union.fields,
None,
);
(
Arc::new(StructData {
name: union.name.clone(),
variant_data: Arc::new(variant_data),
repr,
visibility: item_tree[union.visibility].clone(),
}),
diagnostics.into(),
)
}
}
impl EnumData {
pub(crate) fn enum_data_query(db: &dyn DefDatabase, e: EnumId) -> Arc<EnumData> {
db.enum_data_with_diagnostics(e).0
}
pub(crate) fn enum_data_with_diagnostics_query(
db: &dyn DefDatabase,
e: EnumId,
) -> (Arc<EnumData>, Arc<[DefDiagnostic]>) {
let loc = e.lookup(db);
let krate = loc.container.krate;
let item_tree = loc.id.item_tree(db);
let cfg_options = db.crate_graph()[krate].cfg_options.clone();
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
let enum_ = &item_tree[loc.id.value];
let mut variants = Arena::new();
let mut diagnostics = Vec::new();
for tree_id in enum_.variants.clone() {
if item_tree.attrs(db, krate, tree_id.into()).is_cfg_enabled(&cfg_options) {
let var = &item_tree[tree_id];
let var_data = lower_fields(
let attrs = item_tree.attrs(db, krate, tree_id.into());
let var = &item_tree[tree_id];
if attrs.is_cfg_enabled(&cfg_options) {
let (var_data, field_diagnostics) = lower_fields(
db,
krate,
loc.id.file_id(),
loc.container.local_id,
&item_tree,
&cfg_options,
&var.fields,
Some(enum_.visibility),
);
diagnostics.extend(field_diagnostics);
variants.alloc(EnumVariantData {
name: var.name.clone(),
variant_data: Arc::new(var_data),
});
} else {
diagnostics.push(DefDiagnostic::unconfigured_code(
loc.container.local_id,
InFile::new(loc.id.file_id(), var.ast_id.upcast()),
attrs.cfg().unwrap(),
cfg_options.clone(),
))
}
}
Arc::new(EnumData {
name: enum_.name.clone(),
variants,
visibility: item_tree[enum_.visibility].clone(),
})
(
Arc::new(EnumData {
name: enum_.name.clone(),
variants,
repr,
visibility: item_tree[enum_.visibility].clone(),
}),
diagnostics.into(),
)
}
pub fn variant(&self, name: &Name) -> Option<LocalEnumVariantId> {
let (id, _) = self.variants.iter().find(|(_id, data)| &data.name == name)?;
Some(id)
}
pub fn variant_body_type(&self) -> Either<BuiltinInt, BuiltinUint> {
match self.repr {
Some(ReprData { kind: ReprKind::BuiltinInt { builtin, .. }, .. }) => builtin,
_ => Either::Left(BuiltinInt::Isize),
}
}
}
impl HasChildSource<LocalEnumVariantId> for EnumId {
@ -324,31 +445,64 @@ fn lower_struct(
fn lower_fields(
db: &dyn DefDatabase,
krate: CrateId,
current_file_id: HirFileId,
container: LocalModuleId,
item_tree: &ItemTree,
cfg_options: &CfgOptions,
fields: &Fields,
override_visibility: Option<RawVisibilityId>,
) -> VariantData {
) -> (VariantData, Vec<DefDiagnostic>) {
let mut diagnostics = Vec::new();
match fields {
Fields::Record(flds) => {
let mut arena = Arena::new();
for field_id in flds.clone() {
if item_tree.attrs(db, krate, field_id.into()).is_cfg_enabled(cfg_options) {
arena.alloc(lower_field(item_tree, &item_tree[field_id], override_visibility));
let attrs = item_tree.attrs(db, krate, field_id.into());
let field = &item_tree[field_id];
if attrs.is_cfg_enabled(cfg_options) {
arena.alloc(lower_field(item_tree, field, override_visibility));
} else {
diagnostics.push(DefDiagnostic::unconfigured_code(
container,
InFile::new(
current_file_id,
match field.ast_id {
FieldAstId::Record(it) => it.upcast(),
FieldAstId::Tuple(it) => it.upcast(),
},
),
attrs.cfg().unwrap(),
cfg_options.clone(),
))
}
}
VariantData::Record(arena)
(VariantData::Record(arena), diagnostics)
}
Fields::Tuple(flds) => {
let mut arena = Arena::new();
for field_id in flds.clone() {
if item_tree.attrs(db, krate, field_id.into()).is_cfg_enabled(cfg_options) {
arena.alloc(lower_field(item_tree, &item_tree[field_id], override_visibility));
let attrs = item_tree.attrs(db, krate, field_id.into());
let field = &item_tree[field_id];
if attrs.is_cfg_enabled(cfg_options) {
arena.alloc(lower_field(item_tree, field, override_visibility));
} else {
diagnostics.push(DefDiagnostic::unconfigured_code(
container,
InFile::new(
current_file_id,
match field.ast_id {
FieldAstId::Record(it) => it.upcast(),
FieldAstId::Tuple(it) => it.upcast(),
},
),
attrs.cfg().unwrap(),
cfg_options.clone(),
))
}
}
VariantData::Tuple(arena)
(VariantData::Tuple(arena), diagnostics)
}
Fields::Unit => VariantData::Unit,
Fields::Unit => (VariantData::Unit, diagnostics),
}
}
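
With `ReprData`, the `#[repr(..)]` parser now records the layout kind, `packed`, and `align(n)` separately instead of collapsing everything that is not `packed` into `Other`, and `EnumData::variant_body_type` exposes the integer repr, defaulting to `isize`. A hedged sketch of the attribute forms covered, as plain Rust declarations with the intended mapping noted in comments (the mappings are illustrative; field names follow the diff):

#![allow(dead_code)]

#[repr(C)]                 // kind: C, packed: false, align: None
struct Header { tag: u8, len: u32 }

#[repr(C, u8)]             // kind: BuiltinInt { builtin: u8, is_c: true }
enum Tagged { A(u32), B }

#[repr(u16)]               // kind: BuiltinInt { builtin: u16, is_c: false }
enum Discriminants { X = 1, Y = 1000 }   // discriminant values are u16 here

#[repr(packed)]            // kind: Default, packed: true
struct Tight { a: u8, b: u32 }

#[repr(align(8))]          // kind: Default, align: Some(8)
struct Aligned(u32);

#[repr(transparent)]       // kind: Transparent
struct Wrapper(u64);

fn main() {
    // packed removes padding; without an integer repr, enum discriminants
    // default to isize, which is what variant_body_type falls back to.
    assert_eq!(std::mem::size_of::<Tight>(), 5);
}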

View file

@ -27,7 +27,7 @@ use crate::{
macro_id_to_def_id,
nameres::DefMap,
path::{ModPath, Path},
src::HasSource,
src::{HasChildSource, HasSource},
AsMacroCall, BlockId, DefWithBodyId, HasModule, LocalModuleId, Lookup, MacroId, ModuleId,
UnresolvedMacro,
};
@ -324,6 +324,12 @@ impl Body {
let src = s.source(db);
(src.file_id, s.module(db), src.value.body())
}
DefWithBodyId::VariantId(v) => {
let e = v.parent.lookup(db);
let src = v.parent.child_source(db);
let variant = &src.value[v.local_id];
(src.file_id, e.container, variant.expr())
}
};
let expander = Expander::new(db, file_id, module);
let (mut body, source_map) = Body::new(db, expander, params, body);

View file

@ -29,8 +29,9 @@ use crate::{
builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
db::DefDatabase,
expr::{
dummy_expr_id, Array, BindingAnnotation, Expr, ExprId, FloatTypeWrapper, Label, LabelId,
Literal, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
dummy_expr_id, Array, BindingAnnotation, ClosureKind, Expr, ExprId, FloatTypeWrapper,
Label, LabelId, Literal, MatchArm, Movability, Pat, PatId, RecordFieldPat, RecordLitField,
Statement,
},
intern::Interned,
item_scope::BuiltinShadowMode,
@ -97,6 +98,7 @@ pub(super) fn lower(
name_to_pat_grouping: Default::default(),
is_lowering_inside_or_pat: false,
is_lowering_assignee_expr: false,
is_lowering_generator: false,
}
.collect(params, body)
}
@ -111,6 +113,7 @@ struct ExprCollector<'a> {
name_to_pat_grouping: FxHashMap<Name, Vec<PatId>>,
is_lowering_inside_or_pat: bool,
is_lowering_assignee_expr: bool,
is_lowering_generator: bool,
}
impl ExprCollector<'_> {
@ -358,6 +361,7 @@ impl ExprCollector<'_> {
self.alloc_expr(Expr::Return { expr }, syntax_ptr)
}
ast::Expr::YieldExpr(e) => {
self.is_lowering_generator = true;
let expr = e.expr().map(|e| self.collect_expr(e));
self.alloc_expr(Expr::Yield { expr }, syntax_ptr)
}
@ -459,13 +463,31 @@ impl ExprCollector<'_> {
.ret_type()
.and_then(|r| r.ty())
.map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it)));
let prev_is_lowering_generator = self.is_lowering_generator;
self.is_lowering_generator = false;
let body = self.collect_expr_opt(e.body());
let closure_kind = if self.is_lowering_generator {
let movability = if e.static_token().is_some() {
Movability::Static
} else {
Movability::Movable
};
ClosureKind::Generator(movability)
} else {
ClosureKind::Closure
};
self.is_lowering_generator = prev_is_lowering_generator;
self.alloc_expr(
Expr::Closure {
args: args.into(),
arg_types: arg_types.into(),
ret_type,
body,
closure_kind,
},
syntax_ptr,
)
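
Generator detection in lowering is purely syntactic: collecting a `yield` anywhere in a closure body flips `is_lowering_generator`, and the `static` keyword selects `Movability::Static`. A nightly-only illustration of the three cases (feature name as of the time of this change):

#![feature(generators)]

fn main() {
    // Contains `yield`, no `static`: ClosureKind::Generator(Movability::Movable).
    let _movable = || {
        yield 1;
        "done"
    };

    // `static` generator: ClosureKind::Generator(Movability::Static); it may be
    // self-referential and must not be moved after its first resume.
    let _pinned = static || {
        yield 2;
    };

    // No `yield`: stays ClosureKind::Closure.
    let _plain = || 3;
}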

View file

@ -2,8 +2,10 @@
use std::fmt::{self, Write};
use syntax::ast::HasName;
use crate::{
expr::{Array, BindingAnnotation, Literal, Statement},
expr::{Array, BindingAnnotation, ClosureKind, Literal, Movability, Statement},
pretty::{print_generic_args, print_path, print_type_ref},
type_ref::TypeRef,
};
@ -32,6 +34,16 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
};
format!("const {} = ", name)
}
DefWithBodyId::VariantId(it) => {
needs_semi = false;
let src = it.parent.child_source(db);
let variant = &src.value[it.local_id];
let name = match &variant.name() {
Some(name) => name.to_string(),
None => "_".to_string(),
};
format!("{}", name)
}
};
let mut p = Printer { body, buf: header, indent_level: 0, needs_indent: false };
@ -350,7 +362,10 @@ impl<'a> Printer<'a> {
self.print_expr(*index);
w!(self, "]");
}
Expr::Closure { args, arg_types, ret_type, body } => {
Expr::Closure { args, arg_types, ret_type, body, closure_kind } => {
if let ClosureKind::Generator(Movability::Static) = closure_kind {
w!(self, "static ");
}
w!(self, "|");
for (i, (pat, ty)) in args.iter().zip(arg_types.iter()).enumerate() {
if i != 0 {

View file

@ -198,6 +198,10 @@ impl ChildBySource for EnumId {
impl ChildBySource for DefWithBodyId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let body = db.body(*self);
if let &DefWithBodyId::VariantId(v) = self {
VariantId::EnumVariantId(v).child_by_source_to(db, res, file_id)
}
for (_, def_map) in body.blocks(db) {
// All block expressions are merged into the same map, because they logically all add
// inner items to the containing `DefWithBodyId`.

View file

@ -219,7 +219,7 @@ impl TraitData {
pub(crate) fn trait_data_with_diagnostics_query(
db: &dyn DefDatabase,
tr: TraitId,
) -> (Arc<TraitData>, Arc<Vec<DefDiagnostic>>) {
) -> (Arc<TraitData>, Arc<[DefDiagnostic]>) {
let tr_loc @ ItemLoc { container: module_id, id: tree_id } = tr.lookup(db);
let item_tree = tree_id.item_tree(db);
let tr_def = &item_tree[tree_id.value];
@ -251,7 +251,7 @@ impl TraitData {
visibility,
skip_array_during_method_dispatch,
}),
Arc::new(diagnostics),
diagnostics.into(),
)
}
@ -299,7 +299,7 @@ impl ImplData {
pub(crate) fn impl_data_with_diagnostics_query(
db: &dyn DefDatabase,
id: ImplId,
) -> (Arc<ImplData>, Arc<Vec<DefDiagnostic>>) {
) -> (Arc<ImplData>, Arc<[DefDiagnostic]>) {
let _p = profile::span("impl_data_with_diagnostics_query");
let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
@ -318,7 +318,7 @@ impl ImplData {
(
Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls }),
Arc::new(diagnostics),
diagnostics.into(),
)
}

View file

@ -97,24 +97,33 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
#[salsa::invoke(StructData::struct_data_query)]
fn struct_data(&self, id: StructId) -> Arc<StructData>;
#[salsa::invoke(StructData::struct_data_with_diagnostics_query)]
fn struct_data_with_diagnostics(&self, id: StructId)
-> (Arc<StructData>, Arc<[DefDiagnostic]>);
#[salsa::invoke(StructData::union_data_query)]
fn union_data(&self, id: UnionId) -> Arc<StructData>;
#[salsa::invoke(StructData::union_data_with_diagnostics_query)]
fn union_data_with_diagnostics(&self, id: UnionId) -> (Arc<StructData>, Arc<[DefDiagnostic]>);
#[salsa::invoke(EnumData::enum_data_query)]
fn enum_data(&self, e: EnumId) -> Arc<EnumData>;
#[salsa::invoke(EnumData::enum_data_with_diagnostics_query)]
fn enum_data_with_diagnostics(&self, e: EnumId) -> (Arc<EnumData>, Arc<[DefDiagnostic]>);
#[salsa::invoke(ImplData::impl_data_query)]
fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
#[salsa::invoke(ImplData::impl_data_with_diagnostics_query)]
fn impl_data_with_diagnostics(&self, e: ImplId) -> (Arc<ImplData>, Arc<Vec<DefDiagnostic>>);
fn impl_data_with_diagnostics(&self, e: ImplId) -> (Arc<ImplData>, Arc<[DefDiagnostic]>);
#[salsa::invoke(TraitData::trait_data_query)]
fn trait_data(&self, e: TraitId) -> Arc<TraitData>;
#[salsa::invoke(TraitData::trait_data_with_diagnostics_query)]
fn trait_data_with_diagnostics(&self, tr: TraitId)
-> (Arc<TraitData>, Arc<Vec<DefDiagnostic>>);
fn trait_data_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitData>, Arc<[DefDiagnostic]>);
#[salsa::invoke(TypeAliasData::type_alias_data_query)]
fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;

View file

@ -198,6 +198,7 @@ pub enum Expr {
arg_types: Box<[Option<Interned<TypeRef>>]>,
ret_type: Option<Interned<TypeRef>>,
body: ExprId,
closure_kind: ClosureKind,
},
Tuple {
exprs: Box<[ExprId]>,
@ -211,6 +212,18 @@ pub enum Expr {
Underscore,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ClosureKind {
Closure,
Generator(Movability),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Movability {
Static,
Movable,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Array {
ElementList { elements: Box<[ExprId]>, is_assignee_expr: bool },

View file

@ -333,8 +333,8 @@ fn calculate_best_path(
db,
def_map,
visited_modules,
from,
crate_root,
from,
info.container,
max_len - 1,
prefixed,

View file

@ -943,6 +943,7 @@ impl AssocItem {
pub struct Variant {
pub name: Name,
pub fields: Fields,
pub ast_id: FileAstId<ast::Variant>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@ -952,10 +953,17 @@ pub enum Fields {
Unit,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FieldAstId {
Record(FileAstId<ast::RecordField>),
Tuple(FileAstId<ast::TupleField>),
}
/// A single field of an enum variant or struct
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Field {
pub name: Name,
pub type_ref: Interned<TypeRef>,
pub visibility: RawVisibilityId,
pub ast_id: FieldAstId,
}

View file

@ -184,7 +184,8 @@ impl<'a> Ctx<'a> {
let name = field.name()?.as_name();
let visibility = self.lower_visibility(field);
let type_ref = self.lower_type_ref_opt(field.ty());
let res = Field { name, type_ref, visibility };
let ast_id = FieldAstId::Record(self.source_ast_id_map.ast_id(field));
let res = Field { name, type_ref, visibility, ast_id };
Some(res)
}
@ -203,7 +204,8 @@ impl<'a> Ctx<'a> {
let name = Name::new_tuple_field(idx);
let visibility = self.lower_visibility(field);
let type_ref = self.lower_type_ref_opt(field.ty());
Field { name, type_ref, visibility }
let ast_id = FieldAstId::Tuple(self.source_ast_id_map.ast_id(field));
Field { name, type_ref, visibility, ast_id }
}
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
@ -247,7 +249,8 @@ impl<'a> Ctx<'a> {
fn lower_variant(&mut self, variant: &ast::Variant) -> Option<Variant> {
let name = variant.name()?.as_name();
let fields = self.lower_fields(&variant.kind());
let res = Variant { name, fields };
let ast_id = self.source_ast_id_map.ast_id(variant);
let res = Variant { name, fields, ast_id };
Some(res)
}

View file

@ -115,7 +115,7 @@ impl<'a> Printer<'a> {
w!(self, "{{");
self.indented(|this| {
for field in fields.clone() {
let Field { visibility, name, type_ref } = &this.tree[field];
let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field];
this.print_attrs_of(field);
this.print_visibility(*visibility);
w!(this, "{}: ", name);
@ -129,7 +129,7 @@ impl<'a> Printer<'a> {
w!(self, "(");
self.indented(|this| {
for field in fields.clone() {
let Field { visibility, name, type_ref } = &this.tree[field];
let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field];
this.print_attrs_of(field);
this.print_visibility(*visibility);
w!(this, "{}: ", name);
@ -323,7 +323,7 @@ impl<'a> Printer<'a> {
self.print_where_clause_and_opening_brace(generic_params);
self.indented(|this| {
for variant in variants.clone() {
let Variant { name, fields } = &this.tree[variant];
let Variant { name, fields, ast_id: _ } = &this.tree[variant];
this.print_attrs_of(variant);
w!(this, "{}", name);
this.print_fields(fields);

View file

@ -474,16 +474,24 @@ pub enum DefWithBodyId {
FunctionId(FunctionId),
StaticId(StaticId),
ConstId(ConstId),
VariantId(EnumVariantId),
}
impl_from!(FunctionId, ConstId, StaticId for DefWithBodyId);
impl From<EnumVariantId> for DefWithBodyId {
fn from(id: EnumVariantId) -> Self {
DefWithBodyId::VariantId(id)
}
}
impl DefWithBodyId {
pub fn as_generic_def_id(self) -> Option<GenericDefId> {
match self {
DefWithBodyId::FunctionId(f) => Some(f.into()),
DefWithBodyId::StaticId(_) => None,
DefWithBodyId::ConstId(c) => Some(c.into()),
DefWithBodyId::VariantId(c) => Some(c.into()),
}
}
}
@ -681,6 +689,7 @@ impl HasModule for DefWithBodyId {
DefWithBodyId::FunctionId(it) => it.lookup(db).module(db),
DefWithBodyId::StaticId(it) => it.lookup(db).module(db),
DefWithBodyId::ConstId(it) => it.lookup(db).module(db),
DefWithBodyId::VariantId(it) => it.parent.lookup(db).container,
}
}
}
@ -691,6 +700,7 @@ impl DefWithBodyId {
DefWithBodyId::FunctionId(it) => it.lookup(db).id.value.into(),
DefWithBodyId::StaticId(it) => it.lookup(db).id.value.into(),
DefWithBodyId::ConstId(it) => it.lookup(db).id.value.into(),
DefWithBodyId::VariantId(it) => it.parent.lookup(db).id.value.into(),
}
}
}
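
An enum variant now counts as a `DefWithBodyId` because its explicit discriminant is a constant body of its own, lowered and const-evaluated much like a `const` item (this is what the new `const_eval_variant` query later in this change consumes). In source terms, each initializer below is the body the variant owns, and a variant without one gets the previous value plus one:

enum HttpStatus {
    Ok = 200,                        // body: the literal 200
    NotFound = 2 * 100 + 4 * 51,     // body: a whole constant expression, 404
    Teapot,                          // no body: implicit discriminant = previous + 1
}

fn main() {
    assert_eq!(HttpStatus::Ok as i32, 200);
    assert_eq!(HttpStatus::NotFound as i32, 404);
    assert_eq!(HttpStatus::Teapot as i32, 405);
}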

View file

@ -2122,7 +2122,7 @@ impl ModCollector<'_, '_> {
fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) {
let ast_id = item.ast_id(self.item_tree);
let ast_id = InFile::new(self.file_id(), ast_id);
let ast_id = InFile::new(self.file_id(), ast_id.upcast());
self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id,
ast_id,

View file

@ -4,7 +4,7 @@ use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
use hir_expand::MacroCallKind;
use la_arena::Idx;
use syntax::ast;
use syntax::ast::{self, AnyHasAttrs};
use crate::{
attr::AttrId,
@ -22,7 +22,7 @@ pub enum DefDiagnosticKind {
UnresolvedImport { id: ItemTreeId<item_tree::Import>, index: Idx<ast::UseTree> },
UnconfiguredCode { ast: AstId<ast::Item>, cfg: CfgExpr, opts: CfgOptions },
UnconfiguredCode { ast: AstId<AnyHasAttrs>, cfg: CfgExpr, opts: CfgOptions },
UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId },
@ -75,7 +75,7 @@ impl DefDiagnostic {
pub fn unconfigured_code(
container: LocalModuleId,
ast: AstId<ast::Item>,
ast: AstId<ast::AnyHasAttrs>,
cfg: CfgExpr,
opts: CfgOptions,
) -> Self {

View file

@ -839,6 +839,7 @@ impl HasResolver for DefWithBodyId {
DefWithBodyId::ConstId(c) => c.resolver(db),
DefWithBodyId::FunctionId(f) => f.resolver(db),
DefWithBodyId::StaticId(s) => s.resolver(db),
DefWithBodyId::VariantId(v) => v.parent.resolver(db),
}
}
}

View file

@ -93,7 +93,12 @@ impl AstIdMap {
// trait does not change ids of top-level items, which helps caching.
bdfs(node, |it| {
let kind = it.kind();
if ast::Item::can_cast(kind) || ast::BlockExpr::can_cast(kind) {
if ast::Item::can_cast(kind)
|| ast::BlockExpr::can_cast(kind)
|| ast::Variant::can_cast(kind)
|| ast::RecordField::can_cast(kind)
|| ast::TupleField::can_cast(kind)
{
res.alloc(&it);
true
} else {

View file

@ -811,6 +811,31 @@ impl<'a> InFile<&'a SyntaxNode> {
_ => None,
}
}
pub fn original_syntax_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
if !self.file_id.is_macro() {
return Some(self.map(Clone::clone));
} else if !self.file_id.is_attr_macro(db) {
return None;
}
if let Some(InFile { file_id, value: (first, last) }) = ascend_node_border_tokens(db, self)
{
if file_id.is_macro() {
let range = first.text_range().cover(last.text_range());
tracing::error!("Failed mapping out of macro file for {:?}", range);
return None;
}
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
let kind = self.value.kind();
let value = anc.ancestors().find(|it| it.kind() == kind)?;
return Some(InFile::new(file_id, value));
}
None
}
}
impl InFile<SyntaxToken> {

View file

@ -263,6 +263,7 @@ pub mod known {
Iterator,
IntoIterator,
Item,
IntoIter,
Try,
Ok,
Future,

View file

@ -1,7 +1,7 @@
//! In certain situations, rust automatically inserts derefs as necessary: for
//! example, field accesses `foo.bar` still work when `foo` is actually a
//! reference to a type with the field `bar`. This is an approximation of the
//! logic in rustc (which lives in librustc_typeck/check/autoderef.rs).
//! logic in rustc (which lives in rustc_hir_analysis/check/autoderef.rs).
use std::sync::Arc;
@ -123,13 +123,14 @@ fn deref_by_trait(table: &mut InferenceTable<'_>, ty: Ty) -> Option<Ty> {
let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;
let projection = {
let b = TyBuilder::assoc_type_projection(db, target);
let b = TyBuilder::subst_for_def(db, deref_trait, None);
if b.remaining() != 1 {
// the Target type + Deref trait should only have one generic parameter,
// namely Deref's Self type
return None;
}
b.push(ty).build()
let deref_subst = b.push(ty).build();
TyBuilder::assoc_type_projection(db, target, Some(deref_subst)).build()
};
// Check that the type implements Deref at all
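
The module comment describes the behaviour being modelled: field access auto-derefs through `Deref` impls, and `deref_by_trait` now builds the `Deref` trait substitution first (its single parameter is the `Self` type) and then projects `<Self as Deref>::Target` from it. A plain-Rust example of the language feature in question:

use std::ops::Deref;

struct Wrapper<T>(T);

impl<T> Deref for Wrapper<T> {
    // `Target` is the associated type projected by `deref_by_trait`;
    // the only generic input is Deref's `Self` type, `Wrapper<T>` here.
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

struct Point { x: i32, y: i32 }

fn main() {
    let p = Wrapper(Point { x: 3, y: 4 });
    // `p.x` works even though `p` is a Wrapper<Point>: the compiler
    // inserts (*p).x using the Deref impl above.
    assert_eq!(p.x + p.y, 7);
}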

View file

@ -6,19 +6,19 @@ use chalk_ir::{
cast::{Cast, CastTo, Caster},
fold::TypeFoldable,
interner::HasInterner,
AdtId, BoundVar, DebruijnIndex, Scalar,
AdtId, DebruijnIndex, Scalar,
};
use hir_def::{
builtin_type::BuiltinType, generics::TypeOrConstParamData, ConstParamId, GenericDefId, TraitId,
TypeAliasId,
builtin_type::BuiltinType, generics::TypeOrConstParamData, ConstParamId, DefWithBodyId,
GenericDefId, TraitId, TypeAliasId,
};
use smallvec::SmallVec;
use crate::{
consteval::unknown_const_as_generic, db::HirDatabase, infer::unify::InferenceTable, primitive,
to_assoc_type_id, to_chalk_trait_id, utils::generics, Binders, CallableSig, ConstData,
ConstValue, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution, TraitRef, Ty,
TyDefId, TyExt, TyKind, ValueTyDefId,
to_assoc_type_id, to_chalk_trait_id, utils::generics, Binders, BoundVar, CallableSig,
GenericArg, Interner, ProjectionTy, Substitution, TraitRef, Ty, TyDefId, TyExt, TyKind,
ValueTyDefId,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@ -34,17 +34,32 @@ pub struct TyBuilder<D> {
data: D,
vec: SmallVec<[GenericArg; 2]>,
param_kinds: SmallVec<[ParamKind; 2]>,
parent_subst: Substitution,
}
impl<A> TyBuilder<A> {
fn with_data<B>(self, data: B) -> TyBuilder<B> {
TyBuilder { data, param_kinds: self.param_kinds, vec: self.vec }
TyBuilder {
data,
vec: self.vec,
param_kinds: self.param_kinds,
parent_subst: self.parent_subst,
}
}
}
impl<D> TyBuilder<D> {
fn new(data: D, param_kinds: SmallVec<[ParamKind; 2]>) -> TyBuilder<D> {
TyBuilder { data, vec: SmallVec::with_capacity(param_kinds.len()), param_kinds }
fn new(
data: D,
param_kinds: SmallVec<[ParamKind; 2]>,
parent_subst: Option<Substitution>,
) -> Self {
let parent_subst = parent_subst.unwrap_or_else(|| Substitution::empty(Interner));
Self { data, vec: SmallVec::with_capacity(param_kinds.len()), param_kinds, parent_subst }
}
fn new_empty(data: D) -> Self {
TyBuilder::new(data, SmallVec::new(), None)
}
fn build_internal(self) -> (D, Substitution) {
@ -52,13 +67,18 @@ impl<D> TyBuilder<D> {
for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) {
self.assert_match_kind(a, e);
}
let subst = Substitution::from_iter(Interner, self.vec);
let subst = Substitution::from_iter(
Interner,
self.vec.into_iter().chain(self.parent_subst.iter(Interner).cloned()),
);
(self.data, subst)
}
pub fn push(mut self, arg: impl CastTo<GenericArg>) -> Self {
assert!(self.remaining() > 0);
let arg = arg.cast(Interner);
let expected_kind = &self.param_kinds[self.vec.len()];
let arg_kind = match arg.data(Interner) {
chalk_ir::GenericArgData::Ty(_) => ParamKind::Type,
chalk_ir::GenericArgData::Lifetime(_) => panic!("Got lifetime in TyBuilder::push"),
@ -68,7 +88,9 @@ impl<D> TyBuilder<D> {
}
};
assert_eq!(*expected_kind, arg_kind);
self.vec.push(arg);
self
}
@ -79,20 +101,12 @@ impl<D> TyBuilder<D> {
pub fn fill_with_bound_vars(self, debruijn: DebruijnIndex, starting_from: usize) -> Self {
// self.fill is inlined to make borrow checker happy
let mut this = self;
let other = this.param_kinds.iter().skip(this.vec.len());
let other = &this.param_kinds[this.vec.len()..];
let filler = (starting_from..).zip(other).map(|(idx, kind)| match kind {
ParamKind::Type => {
GenericArgData::Ty(TyKind::BoundVar(BoundVar::new(debruijn, idx)).intern(Interner))
.intern(Interner)
ParamKind::Type => BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner),
ParamKind::Const(ty) => {
BoundVar::new(debruijn, idx).to_const(Interner, ty.clone()).cast(Interner)
}
ParamKind::Const(ty) => GenericArgData::Const(
ConstData {
value: ConstValue::BoundVar(BoundVar::new(debruijn, idx)),
ty: ty.clone(),
}
.intern(Interner),
)
.intern(Interner),
});
this.vec.extend(filler.take(this.remaining()).casted(Interner));
assert_eq!(this.remaining(), 0);
@ -102,8 +116,8 @@ impl<D> TyBuilder<D> {
pub fn fill_with_unknown(self) -> Self {
// self.fill is inlined to make borrow checker happy
let mut this = self;
let filler = this.param_kinds.iter().skip(this.vec.len()).map(|x| match x {
ParamKind::Type => GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner),
let filler = this.param_kinds[this.vec.len()..].iter().map(|x| match x {
ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner),
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
});
this.vec.extend(filler.casted(Interner));
@ -113,33 +127,17 @@ impl<D> TyBuilder<D> {
pub(crate) fn fill_with_inference_vars(self, table: &mut InferenceTable<'_>) -> Self {
self.fill(|x| match x {
ParamKind::Type => GenericArgData::Ty(table.new_type_var()).intern(Interner),
ParamKind::Const(ty) => {
GenericArgData::Const(table.new_const_var(ty.clone())).intern(Interner)
}
ParamKind::Type => table.new_type_var().cast(Interner),
ParamKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner),
})
}
pub fn fill(mut self, filler: impl FnMut(&ParamKind) -> GenericArg) -> Self {
self.vec.extend(self.param_kinds.iter().skip(self.vec.len()).map(filler));
self.vec.extend(self.param_kinds[self.vec.len()..].iter().map(filler));
assert_eq!(self.remaining(), 0);
self
}
pub fn use_parent_substs(mut self, parent_substs: &Substitution) -> Self {
assert!(self.vec.is_empty());
assert!(parent_substs.len(Interner) <= self.param_kinds.len());
self.extend(parent_substs.iter(Interner).cloned());
self
}
fn extend(&mut self, it: impl Iterator<Item = GenericArg> + Clone) {
for x in it.clone().zip(self.param_kinds.iter().skip(self.vec.len())) {
self.assert_match_kind(&x.0, &x.1);
}
self.vec.extend(it);
}
fn assert_match_kind(&self, a: &chalk_ir::GenericArg<Interner>, e: &ParamKind) {
match (a.data(Interner), e) {
(chalk_ir::GenericArgData::Ty(_), ParamKind::Type)
@ -188,21 +186,42 @@ impl TyBuilder<()> {
params.placeholder_subst(db)
}
pub fn subst_for_def(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> TyBuilder<()> {
let def = def.into();
let params = generics(db.upcast(), def);
TyBuilder::new(
(),
params
.iter()
.map(|(id, data)| match data {
TypeOrConstParamData::TypeParamData(_) => ParamKind::Type,
TypeOrConstParamData::ConstParamData(_) => {
ParamKind::Const(db.const_param_ty(ConstParamId::from_unchecked(id)))
}
})
.collect(),
)
pub fn subst_for_def(
db: &dyn HirDatabase,
def: impl Into<GenericDefId>,
parent_subst: Option<Substitution>,
) -> TyBuilder<()> {
let generics = generics(db.upcast(), def.into());
assert!(generics.parent_generics().is_some() == parent_subst.is_some());
let params = generics
.iter_self()
.map(|(id, data)| match data {
TypeOrConstParamData::TypeParamData(_) => ParamKind::Type,
TypeOrConstParamData::ConstParamData(_) => {
ParamKind::Const(db.const_param_ty(ConstParamId::from_unchecked(id)))
}
})
.collect();
TyBuilder::new((), params, parent_subst)
}
/// Creates a `TyBuilder` to build `Substitution` for a generator defined in `parent`.
///
/// A generator's substitution consists of:
/// - resume type of generator
/// - yield type of generator ([`Generator::Yield`](std::ops::Generator::Yield))
/// - return type of generator ([`Generator::Return`](std::ops::Generator::Return))
/// - generic parameters in scope on `parent`
/// in this order.
///
/// This method prepopulates the builder with placeholder substitution of `parent`, so you
/// should only push exactly 3 `GenericArg`s before building.
pub fn subst_for_generator(db: &dyn HirDatabase, parent: DefWithBodyId) -> TyBuilder<()> {
let parent_subst =
parent.as_generic_def_id().map(|p| generics(db.upcast(), p).placeholder_subst(db));
// These represent resume type, yield type, and return type of generator.
let params = std::iter::repeat(ParamKind::Type).take(3).collect();
TyBuilder::new((), params, parent_subst)
}
pub fn build(self) -> Substitution {
@ -213,7 +232,7 @@ impl TyBuilder<()> {
impl TyBuilder<hir_def::AdtId> {
pub fn adt(db: &dyn HirDatabase, def: hir_def::AdtId) -> TyBuilder<hir_def::AdtId> {
TyBuilder::subst_for_def(db, def).with_data(def)
TyBuilder::subst_for_def(db, def, None).with_data(def)
}
pub fn fill_with_defaults(
@ -221,16 +240,27 @@ impl TyBuilder<hir_def::AdtId> {
db: &dyn HirDatabase,
mut fallback: impl FnMut() -> Ty,
) -> Self {
// Note that we're building ADT, so we never have parent generic parameters.
let defaults = db.generic_defaults(self.data.into());
let dummy_ty = TyKind::Error.intern(Interner).cast(Interner);
for default_ty in defaults.iter().skip(self.vec.len()) {
if let GenericArgData::Ty(x) = default_ty.skip_binders().data(Interner) {
// NOTE(skip_binders): we only check if the arg type is error type.
if let Some(x) = default_ty.skip_binders().ty(Interner) {
if x.is_unknown() {
self.vec.push(fallback().cast(Interner));
continue;
}
};
// each default can depend on the previous parameters
let subst_so_far = Substitution::from_iter(Interner, self.vec.clone());
}
// Each default can only depend on the previous parameters.
// FIXME: we don't handle const generics here.
let subst_so_far = Substitution::from_iter(
Interner,
self.vec
.iter()
.cloned()
.chain(iter::repeat(dummy_ty.clone()))
.take(self.param_kinds.len()),
);
self.vec.push(default_ty.clone().substitute(Interner, &subst_so_far).cast(Interner));
}
self
@ -245,7 +275,7 @@ impl TyBuilder<hir_def::AdtId> {
pub struct Tuple(usize);
impl TyBuilder<Tuple> {
pub fn tuple(size: usize) -> TyBuilder<Tuple> {
TyBuilder::new(Tuple(size), iter::repeat(ParamKind::Type).take(size).collect())
TyBuilder::new(Tuple(size), iter::repeat(ParamKind::Type).take(size).collect(), None)
}
pub fn build(self) -> Ty {
@ -256,7 +286,7 @@ impl TyBuilder<Tuple> {
impl TyBuilder<TraitId> {
pub fn trait_ref(db: &dyn HirDatabase, def: TraitId) -> TyBuilder<TraitId> {
TyBuilder::subst_for_def(db, def).with_data(def)
TyBuilder::subst_for_def(db, def, None).with_data(def)
}
pub fn build(self) -> TraitRef {
@ -266,8 +296,12 @@ impl TyBuilder<TraitId> {
}
impl TyBuilder<TypeAliasId> {
pub fn assoc_type_projection(db: &dyn HirDatabase, def: TypeAliasId) -> TyBuilder<TypeAliasId> {
TyBuilder::subst_for_def(db, def).with_data(def)
pub fn assoc_type_projection(
db: &dyn HirDatabase,
def: TypeAliasId,
parent_subst: Option<Substitution>,
) -> TyBuilder<TypeAliasId> {
TyBuilder::subst_for_def(db, def, parent_subst).with_data(def)
}
pub fn build(self) -> ProjectionTy {
@ -277,19 +311,6 @@ impl TyBuilder<TypeAliasId> {
}
impl<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>> TyBuilder<Binders<T>> {
fn subst_binders(b: Binders<T>) -> Self {
let param_kinds = b
.binders
.iter(Interner)
.map(|x| match x {
chalk_ir::VariableKind::Ty(_) => ParamKind::Type,
chalk_ir::VariableKind::Lifetime => panic!("Got lifetime parameter"),
chalk_ir::VariableKind::Const(ty) => ParamKind::Const(ty.clone()),
})
.collect();
TyBuilder::new(b, param_kinds)
}
pub fn build(self) -> T {
let (b, subst) = self.build_internal();
b.substitute(Interner, &subst)
@ -297,15 +318,41 @@ impl<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>> TyBuilder<Bin
}
impl TyBuilder<Binders<Ty>> {
pub fn def_ty(db: &dyn HirDatabase, def: TyDefId) -> TyBuilder<Binders<Ty>> {
TyBuilder::subst_binders(db.ty(def))
pub fn def_ty(
db: &dyn HirDatabase,
def: TyDefId,
parent_subst: Option<Substitution>,
) -> TyBuilder<Binders<Ty>> {
let poly_ty = db.ty(def);
let id: GenericDefId = match def {
TyDefId::BuiltinType(_) => {
assert!(parent_subst.is_none());
return TyBuilder::new_empty(poly_ty);
}
TyDefId::AdtId(id) => id.into(),
TyDefId::TypeAliasId(id) => id.into(),
};
TyBuilder::subst_for_def(db, id, parent_subst).with_data(poly_ty)
}
pub fn impl_self_ty(db: &dyn HirDatabase, def: hir_def::ImplId) -> TyBuilder<Binders<Ty>> {
TyBuilder::subst_binders(db.impl_self_ty(def))
TyBuilder::subst_for_def(db, def, None).with_data(db.impl_self_ty(def))
}
pub fn value_ty(db: &dyn HirDatabase, def: ValueTyDefId) -> TyBuilder<Binders<Ty>> {
TyBuilder::subst_binders(db.value_ty(def))
pub fn value_ty(
db: &dyn HirDatabase,
def: ValueTyDefId,
parent_subst: Option<Substitution>,
) -> TyBuilder<Binders<Ty>> {
let poly_value_ty = db.value_ty(def);
let id = match def.to_generic_def_id() {
Some(id) => id,
None => {
// static items
assert!(parent_subst.is_none());
return TyBuilder::new_empty(poly_value_ty);
}
};
TyBuilder::subst_for_def(db, id, parent_subst).with_data(poly_value_ty)
}
}
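
`subst_for_generator` documents the substitution layout it builds: resume type, yield type, return type, then the generic parameters in scope on the parent body. A nightly-only sketch showing which types those three slots correspond to for an ordinary generator (feature names as of this change):

#![feature(generators, generator_trait)]

use std::ops::{Generator, GeneratorState};
use std::pin::Pin;

fn main() {
    // Resume type: () (the argument of `resume`), yield type: i32,
    // return type: &'static str; these are the three slots pushed before
    // the parent's generics in the generator substitution.
    let mut g = || {
        yield 1;
        yield 2;
        "done"
    };

    assert!(matches!(Pin::new(&mut g).resume(()), GeneratorState::Yielded(1)));
    assert!(matches!(Pin::new(&mut g).resume(()), GeneratorState::Yielded(2)));
    assert!(matches!(Pin::new(&mut g).resume(()), GeneratorState::Complete("done")));
}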

View file

@ -11,6 +11,7 @@ use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
use base_db::CrateId;
use hir_def::{
expr::Movability,
lang_item::{lang_attr, LangItemTarget},
AssocItemId, GenericDefId, HasModule, ItemContainerId, Lookup, ModuleId, TypeAliasId,
};
@ -26,9 +27,9 @@ use crate::{
to_assoc_type_id, to_chalk_trait_id,
traits::ChalkContext,
utils::generics,
AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId, Interner, ProjectionTy,
ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder,
TyExt, TyKind, WhereClause,
wrap_empty_binders, AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId,
Interner, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef,
TraitRefExt, Ty, TyBuilder, TyExt, TyKind, WhereClause,
};
pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>;
@ -372,17 +373,62 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
}
fn generator_datum(
&self,
_: chalk_ir::GeneratorId<Interner>,
id: chalk_ir::GeneratorId<Interner>,
) -> std::sync::Arc<chalk_solve::rust_ir::GeneratorDatum<Interner>> {
// FIXME
unimplemented!()
let (parent, expr) = self.db.lookup_intern_generator(id.into());
// We fill the substitution with unknown types, because we only need to know whether the generic
// params are types or consts to build `Binders`; the ones being filled in stand in for
// `resume_type`, `yield_type`, and `return_type` of the generator in question.
let subst = TyBuilder::subst_for_generator(self.db, parent).fill_with_unknown().build();
let input_output = rust_ir::GeneratorInputOutputDatum {
resume_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
.intern(Interner),
yield_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 1))
.intern(Interner),
return_type: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 2))
.intern(Interner),
// FIXME: calculate upvars
upvars: vec![],
};
let it = subst
.iter(Interner)
.map(|it| it.constant(Interner).map(|c| c.data(Interner).ty.clone()));
let input_output = crate::make_type_and_const_binders(it, input_output);
let movability = match self.db.body(parent)[expr] {
hir_def::expr::Expr::Closure {
closure_kind: hir_def::expr::ClosureKind::Generator(movability),
..
} => movability,
_ => unreachable!("non generator expression interned as generator"),
};
let movability = match movability {
Movability::Static => rust_ir::Movability::Static,
Movability::Movable => rust_ir::Movability::Movable,
};
Arc::new(rust_ir::GeneratorDatum { movability, input_output })
}
fn generator_witness_datum(
&self,
_: chalk_ir::GeneratorId<Interner>,
id: chalk_ir::GeneratorId<Interner>,
) -> std::sync::Arc<chalk_solve::rust_ir::GeneratorWitnessDatum<Interner>> {
// FIXME
unimplemented!()
// FIXME: calculate inner types
let inner_types =
rust_ir::GeneratorWitnessExistential { types: wrap_empty_binders(vec![]) };
let (parent, _) = self.db.lookup_intern_generator(id.into());
// See the comment in `generator_datum()` for unknown types.
let subst = TyBuilder::subst_for_generator(self.db, parent).fill_with_unknown().build();
let it = subst
.iter(Interner)
.map(|it| it.constant(Interner).map(|c| c.data(Interner).ty.clone()));
let inner_types = crate::make_type_and_const_binders(it, inner_types);
Arc::new(rust_ir::GeneratorWitnessDatum { inner_types })
}
fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase<Interner> {
@ -429,10 +475,15 @@ pub(crate) fn associated_ty_data_query(
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
let ctx = crate::TyLoweringContext::new(db, &resolver)
.with_type_param_mode(crate::lower::ParamLoweringMode::Variable);
let pro_ty = TyBuilder::assoc_type_projection(db, type_alias)
let trait_subst = TyBuilder::subst_for_def(db, trait_, None)
.fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, generic_params.len_self())
.build();
let pro_ty = TyBuilder::assoc_type_projection(db, type_alias, Some(trait_subst))
.fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0)
.build();
let self_ty = TyKind::Alias(AliasTy::Projection(pro_ty)).intern(Interner);
let mut bounds: Vec<_> = type_alias_data
.bounds
.iter()

View file

@ -152,7 +152,7 @@ impl TyExt for Ty {
TyKind::FnDef(def, parameters) => {
let callable_def = db.lookup_intern_callable_def((*def).into());
let sig = db.callable_item_signature(callable_def);
Some(sig.substitute(Interner, &parameters))
Some(sig.substitute(Interner, parameters))
}
TyKind::Closure(.., substs) => {
let sig_param = substs.at(Interner, 0).assert_ty_ref(Interner);
@ -166,6 +166,8 @@ impl TyExt for Ty {
let trait_ref = match self.kind(Interner) {
// The principal trait bound should be the first element of the bounds. This is an
// invariant ensured by `TyLoweringContext::lower_dyn_trait()`.
// FIXME: dyn types may not have principal trait and we don't want to return auto trait
// here.
TyKind::Dyn(dyn_ty) => dyn_ty.bounds.skip_binders().interned().get(0).and_then(|b| {
match b.skip_binders() {
WhereClause::Implemented(trait_ref) => Some(trait_ref),

View file

@ -7,14 +7,17 @@ use std::{
use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData, IntTy, Scalar};
use hir_def::{
builtin_type::BuiltinInt,
expr::{ArithOp, BinaryOp, Expr, ExprId, Literal, Pat, PatId},
path::ModPath,
resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs},
src::HasChildSource,
type_ref::ConstScalar,
ConstId, DefWithBodyId,
ConstId, DefWithBodyId, EnumVariantId, Lookup,
};
use la_arena::{Arena, Idx};
use la_arena::{Arena, Idx, RawIdx};
use stdx::never;
use syntax::ast::HasName;
use crate::{
db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
@ -77,6 +80,7 @@ pub enum ConstEvalError {
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ComputedExpr {
Literal(Literal),
Enum(String, EnumVariantId, Literal),
Tuple(Box<[ComputedExpr]>),
}
@ -104,6 +108,7 @@ impl Display for ComputedExpr {
Literal::String(x) => std::fmt::Debug::fmt(x, f),
Literal::ByteString(x) => std::fmt::Debug::fmt(x, f),
},
ComputedExpr::Enum(name, _, _) => name.fmt(f),
ComputedExpr::Tuple(t) => {
f.write_char('(')?;
for x in &**t {
@ -148,13 +153,51 @@ fn is_valid(scalar: &Scalar, value: i128) -> bool {
}
}
fn get_name(ctx: &mut ConstEvalCtx<'_>, variant: EnumVariantId) -> String {
let loc = variant.parent.lookup(ctx.db.upcast());
let children = variant.parent.child_source(ctx.db.upcast());
let item_tree = loc.id.item_tree(ctx.db.upcast());
let variant_name = children.value[variant.local_id].name();
let enum_name = item_tree[loc.id.value].name.to_string();
enum_name + "::" + &variant_name.unwrap().to_string()
}
pub fn eval_const(
expr_id: ExprId,
ctx: &mut ConstEvalCtx<'_>,
) -> Result<ComputedExpr, ConstEvalError> {
let u128_to_i128 = |it: u128| -> Result<i128, ConstEvalError> {
it.try_into().map_err(|_| ConstEvalError::NotSupported("u128 is too big"))
};
let expr = &ctx.exprs[expr_id];
match expr {
Expr::Missing => Err(ConstEvalError::IncompleteExpr),
Expr::Missing => match ctx.owner {
// evaluate the implicit variant index of an enum variant without expression
// FIXME: This should return the type of the enum representation
DefWithBodyId::VariantId(variant) => {
let prev_idx: u32 = variant.local_id.into_raw().into();
let prev_idx = prev_idx.checked_sub(1).map(RawIdx::from).map(Idx::from_raw);
let value = match prev_idx {
Some(local_id) => {
let prev_variant = EnumVariantId { local_id, parent: variant.parent };
1 + match ctx.db.const_eval_variant(prev_variant)? {
ComputedExpr::Literal(Literal::Int(v, _)) => v,
ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
_ => {
return Err(ConstEvalError::NotSupported(
"Enum can't contain this kind of value",
))
}
}
}
_ => 0,
};
Ok(ComputedExpr::Literal(Literal::Int(value, Some(BuiltinInt::I128))))
}
_ => Err(ConstEvalError::IncompleteExpr),
},
Expr::Literal(l) => Ok(ComputedExpr::Literal(l.clone())),
&Expr::UnaryOp { expr, op } => {
let ty = &ctx.expr_ty(expr);
@ -167,9 +210,7 @@ pub fn eval_const(
return Ok(ComputedExpr::Literal(Literal::Bool(!b)))
}
ComputedExpr::Literal(Literal::Int(v, _)) => v,
ComputedExpr::Literal(Literal::Uint(v, _)) => v
.try_into()
.map_err(|_| ConstEvalError::NotSupported("too big u128"))?,
ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
_ => return Err(ConstEvalError::NotSupported("this kind of operator")),
};
let r = match ty.kind(Interner) {
@ -198,9 +239,7 @@ pub fn eval_const(
hir_def::expr::UnaryOp::Neg => {
let v = match ev {
ComputedExpr::Literal(Literal::Int(v, _)) => v,
ComputedExpr::Literal(Literal::Uint(v, _)) => v
.try_into()
.map_err(|_| ConstEvalError::NotSupported("too big u128"))?,
ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
_ => return Err(ConstEvalError::NotSupported("this kind of operator")),
};
Ok(ComputedExpr::Literal(Literal::Int(
@ -219,16 +258,12 @@ pub fn eval_const(
let op = op.ok_or(ConstEvalError::IncompleteExpr)?;
let v1 = match lhs {
ComputedExpr::Literal(Literal::Int(v, _)) => v,
ComputedExpr::Literal(Literal::Uint(v, _)) => {
v.try_into().map_err(|_| ConstEvalError::NotSupported("too big u128"))?
}
ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
_ => return Err(ConstEvalError::NotSupported("this kind of operator")),
};
let v2 = match rhs {
ComputedExpr::Literal(Literal::Int(v, _)) => v,
ComputedExpr::Literal(Literal::Uint(v, _)) => {
v.try_into().map_err(|_| ConstEvalError::NotSupported("too big u128"))?
}
ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
_ => return Err(ConstEvalError::NotSupported("this kind of operator")),
};
match op {
@ -339,9 +374,22 @@ pub fn eval_const(
ValueNs::GenericParam(_) => {
Err(ConstEvalError::NotSupported("const generic without substitution"))
}
ValueNs::EnumVariantId(id) => match ctx.db.const_eval_variant(id)? {
ComputedExpr::Literal(lit) => {
Ok(ComputedExpr::Enum(get_name(ctx, id), id, lit))
}
_ => Err(ConstEvalError::NotSupported(
"Enums can't evaluate to anything but numbers",
)),
},
_ => Err(ConstEvalError::NotSupported("path that are not const or local")),
}
}
// FIXME: Handle the cast target
&Expr::Cast { expr, .. } => match eval_const(expr, ctx)? {
ComputedExpr::Enum(_, _, lit) => Ok(ComputedExpr::Literal(lit)),
_ => Err(ConstEvalError::NotSupported("Can't cast these types")),
},
_ => Err(ConstEvalError::NotSupported("This kind of expression")),
}
}
@ -412,7 +460,15 @@ pub(crate) fn const_eval_recover(
Err(ConstEvalError::Loop)
}
pub(crate) fn const_eval_query(
pub(crate) fn const_eval_variant_recover(
_: &dyn HirDatabase,
_: &[String],
_: &EnumVariantId,
) -> Result<ComputedExpr, ConstEvalError> {
Err(ConstEvalError::Loop)
}
pub(crate) fn const_eval_variant_query(
db: &dyn HirDatabase,
const_id: ConstId,
) -> Result<ComputedExpr, ConstEvalError> {
@ -433,6 +489,26 @@ pub(crate) fn const_eval_query(
result
}
pub(crate) fn const_eval_query_variant(
db: &dyn HirDatabase,
variant_id: EnumVariantId,
) -> Result<ComputedExpr, ConstEvalError> {
let def = variant_id.into();
let body = db.body(def);
let infer = &db.infer(def);
eval_const(
body.body_expr,
&mut ConstEvalCtx {
db,
owner: def,
exprs: &body.exprs,
pats: &body.pats,
local_data: HashMap::default(),
infer,
},
)
}
pub(crate) fn eval_to_const<'a>(
expr: Idx<Expr>,
mode: ParamLoweringMode,

View file

@ -87,6 +87,49 @@ fn consts() {
);
}
#[test]
fn enums() {
check_number(
r#"
enum E {
F1 = 1,
F2 = 2 * E::F1 as u8,
F3 = 3 * E::F2 as u8,
}
const GOAL: i32 = E::F3 as u8;
"#,
6,
);
check_number(
r#"
enum E { F1 = 1, F2, }
const GOAL: i32 = E::F2 as u8;
"#,
2,
);
check_number(
r#"
enum E { F1, }
const GOAL: i32 = E::F1 as u8;
"#,
0,
);
let r = eval_goal(
r#"
enum E { A = 1, }
const GOAL: E = E::A;
"#,
)
.unwrap();
match r {
ComputedExpr::Enum(name, _, Literal::Uint(val, _)) => {
assert_eq!(name, "E::A");
assert_eq!(val, 1);
}
x => panic!("Expected enum but found {:?}", x),
}
}
#[test]
fn const_loop() {
check_fail(

View file

@ -6,8 +6,8 @@ use std::sync::Arc;
use arrayvec::ArrayVec;
use base_db::{impl_intern_key, salsa, CrateId, Upcast};
use hir_def::{
db::DefDatabase, expr::ExprId, BlockId, ConstId, ConstParamId, DefWithBodyId, FunctionId,
GenericDefId, ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId,
db::DefDatabase, expr::ExprId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId,
FunctionId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId,
};
use la_arena::ArenaMap;
@ -43,10 +43,14 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::lower::const_param_ty_query)]
fn const_param_ty(&self, def: ConstParamId) -> Ty;
#[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::invoke(crate::consteval::const_eval_variant_query)]
#[salsa::cycle(crate::consteval::const_eval_recover)]
fn const_eval(&self, def: ConstId) -> Result<ComputedExpr, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_query_variant)]
#[salsa::cycle(crate::consteval::const_eval_variant_recover)]
fn const_eval_variant(&self, def: EnumVariantId) -> Result<ComputedExpr, ConstEvalError>;
#[salsa::invoke(crate::lower::impl_trait_query)]
fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
@ -116,6 +120,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
#[salsa::interned]
fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> InternedClosureId;
#[salsa::interned]
fn intern_generator(&self, id: (DefWithBodyId, ExprId)) -> InternedGeneratorId;
#[salsa::invoke(chalk_db::associated_ty_data_query)]
fn associated_ty_data(&self, id: chalk_db::AssocTypeId) -> Arc<chalk_db::AssociatedTyDatum>;
@ -188,6 +194,9 @@ fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult>
DefWithBodyId::ConstId(it) => {
db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
}
DefWithBodyId::VariantId(it) => {
db.enum_data(it.parent).variants[it.local_id].name.to_string()
}
});
db.infer_query(def)
}
@ -226,6 +235,10 @@ impl_intern_key!(InternedOpaqueTyId);
pub struct InternedClosureId(salsa::InternId);
impl_intern_key!(InternedClosureId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedGeneratorId(salsa::InternId);
impl_intern_key!(InternedGeneratorId);
/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
/// we have different IDs for struct and enum variant constructors.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]

View file

@ -18,7 +18,9 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
let is_unsafe = match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).has_unsafe_kw(),
DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) => false,
DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) | DefWithBodyId::VariantId(_) => {
false
}
};
if is_unsafe {
return res;

View file

@ -20,13 +20,14 @@ use hir_def::{
};
use hir_expand::{hygiene::Hygiene, name::Name};
use itertools::Itertools;
use smallvec::SmallVec;
use syntax::SmolStr;
use crate::{
db::HirDatabase,
from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, lt_from_placeholder_idx,
mapping::from_chalk,
primitive, subst_prefix, to_assoc_type_id,
primitive, to_assoc_type_id,
utils::{self, generics},
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstValue, DomainGoal,
GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives, Mutability,
@ -221,6 +222,7 @@ pub enum DisplaySourceCodeError {
PathNotFound,
UnknownType,
Closure,
Generator,
}
pub enum HirDisplayError {
@ -504,8 +506,15 @@ impl HirDisplay for Ty {
let total_len = parent_params + self_param + type_params + const_params;
// We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
if total_len > 0 {
// `parameters` are in the order of fn's params (including impl traits),
// parent's params (those from enclosing impl or trait, if any).
let parameters = parameters.as_slice(Interner);
let fn_params_len = self_param + type_params + const_params;
let fn_params = parameters.get(..fn_params_len);
let parent_params = parameters.get(parameters.len() - parent_params..);
let params = parent_params.into_iter().chain(fn_params).flatten();
write!(f, "<")?;
f.write_joined(&parameters.as_slice(Interner)[..total_len], ", ")?;
f.write_joined(params, ", ")?;
write!(f, ">")?;
}
}
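// Editorial sketch, not part of the diff: the comment above describes the new
// substitution layout, where an item's own generic arguments come first and
// its parent's (impl/trait) arguments come last, so the display code slices
// the parent arguments off the tail and prints them first. A toy model of
// that reordering, using strings in place of `GenericArg`s:
fn display_order<'a>(
    parameters: &[&'a str],
    parent_params: usize,
    fn_params_len: usize,
) -> Vec<&'a str> {
    // the item's own arguments occupy the front of the slice
    let fn_params = parameters.get(..fn_params_len);
    // the parent's arguments sit at the very end
    let parent = parameters.get(parameters.len() - parent_params..);
    // print the parent's arguments first, then the item's own
    parent.into_iter().chain(fn_params).flatten().copied().collect()
}
// display_order(&["T", "U", "Self"], 1, 2) yields ["Self", "T", "U"].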
@ -577,9 +586,8 @@ impl HirDisplay for Ty {
Some(x) => x,
None => return true,
};
let actual_default = default_parameter
.clone()
.substitute(Interner, &subst_prefix(parameters, i));
let actual_default =
default_parameter.clone().substitute(Interner, &parameters);
parameter != &actual_default
}
let mut default_from = 0;
@ -783,7 +791,34 @@ impl HirDisplay for Ty {
write!(f, "{{unknown}}")?;
}
TyKind::InferenceVar(..) => write!(f, "_")?,
TyKind::Generator(..) => write!(f, "{{generator}}")?,
TyKind::Generator(_, subst) => {
if f.display_target.is_source_code() {
return Err(HirDisplayError::DisplaySourceCodeError(
DisplaySourceCodeError::Generator,
));
}
let subst = subst.as_slice(Interner);
let a: Option<SmallVec<[&Ty; 3]>> = subst
.get(subst.len() - 3..)
.map(|args| args.iter().map(|arg| arg.ty(Interner)).collect())
.flatten();
if let Some([resume_ty, yield_ty, ret_ty]) = a.as_deref() {
write!(f, "|")?;
resume_ty.hir_fmt(f)?;
write!(f, "|")?;
write!(f, " yields ")?;
yield_ty.hir_fmt(f)?;
write!(f, " -> ")?;
ret_ty.hir_fmt(f)?;
} else {
// This *should* be unreachable, but fall back just in case.
write!(f, "{{generator}}")?;
}
}
TyKind::GeneratorWitness(..) => write!(f, "{{generator witness}}")?,
}
Ok(())

View file

@ -2,7 +2,7 @@
//! the type of each expression and pattern.
//!
//! For type inference, compare the implementations in rustc (the various
//! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and
//! check_* methods in rustc_hir_analysis/check/mod.rs are a good entry point) and
//! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for
//! inference here is the `infer` function, which infers the types of all
//! expressions in a given function.
@ -19,14 +19,15 @@ use std::sync::Arc;
use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags};
use hir_def::{
body::Body,
builtin_type::BuiltinType,
data::{ConstData, StaticData},
expr::{BindingAnnotation, ExprId, PatId},
lang_item::LangItemTarget,
path::{path, Path},
resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
type_ref::TypeRef,
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, Lookup,
TraitId, TypeAliasId, VariantId,
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule,
ItemContainerId, Lookup, TraitId, TypeAliasId, VariantId,
};
use hir_expand::name::{name, Name};
use itertools::Either;
@ -67,6 +68,12 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
DefWithBodyId::FunctionId(f) => ctx.collect_fn(f),
DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
DefWithBodyId::VariantId(v) => {
ctx.return_ty = TyBuilder::builtin(match db.enum_data(v.parent).variant_body_type() {
Either::Left(builtin) => BuiltinType::Int(builtin),
Either::Right(builtin) => BuiltinType::Uint(builtin),
});
}
}
ctx.infer_body();
@ -332,7 +339,7 @@ pub struct InferenceResult {
/// unresolved or missing subpatterns or subpatterns of mismatched types.
pub type_of_pat: ArenaMap<PatId, Ty>,
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
/// Interned Unknown to return references to.
/// Interned common types to return references to.
standard_types: InternedStandardTypes,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
@ -412,6 +419,8 @@ pub(crate) struct InferenceContext<'a> {
/// closures, but currently this is the only field that will change there,
/// so it doesn't make sense.
return_ty: Ty,
/// The resume type and the yield type, respectively, of the generator being inferred.
resume_yield_tys: Option<(Ty, Ty)>,
diverges: Diverges,
breakables: Vec<BreakableContext>,
}
@ -476,6 +485,7 @@ impl<'a> InferenceContext<'a> {
table: unify::InferenceTable::new(db, trait_env.clone()),
trait_env,
return_ty: TyKind::Error.intern(Interner), // set in collect_fn_signature
resume_yield_tys: None,
db,
owner,
body,
@ -703,6 +713,8 @@ impl<'a> InferenceContext<'a> {
&mut self,
inner_ty: Ty,
assoc_ty: Option<TypeAliasId>,
// FIXME(GATs): these are args for the trait ref; args for the assoc type itself
// should be handled once we support them.
params: &[GenericArg],
) -> Ty {
match assoc_ty {
@ -794,7 +806,18 @@ impl<'a> InferenceContext<'a> {
self.resolve_variant_on_alias(ty, unresolved, path)
}
TypeNs::TypeAliasId(it) => {
let ty = TyBuilder::def_ty(self.db, it.into())
let container = it.lookup(self.db.upcast()).container;
let parent_subst = match container {
ItemContainerId::TraitId(id) => {
let subst = TyBuilder::subst_for_def(self.db, id, None)
.fill_with_inference_vars(&mut self.table)
.build();
Some(subst)
}
// Type aliases do not exist in impls.
_ => None,
};
let ty = TyBuilder::def_ty(self.db, it.into(), parent_subst)
.fill_with_inference_vars(&mut self.table)
.build();
self.resolve_variant_on_alias(ty, unresolved, path)
@ -873,6 +896,12 @@ impl<'a> InferenceContext<'a> {
fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
let path = path![core::iter::IntoIterator];
let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name![IntoIter])
}
fn resolve_iterator_item(&self) -> Option<TypeAliasId> {
let path = path![core::iter::Iterator];
let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name![Item])
}
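// Editorial sketch, not part of the diff: together with the `Expr::For` hunk
// in a later file, the new `resolve_iterator_item` helper changes the type
// given to a `for` pattern from `<I as IntoIterator>::Item` to the two-step
// projection rustc uses. In plain Rust terms, the pattern now binds:
fn for_loop_pat_ty<I: IntoIterator>(iterable: I) -> Option<<I::IntoIter as Iterator>::Item> {
    // `I::IntoIter: Iterator` is guaranteed by the `IntoIterator` definition,
    // and its `Item` is what the loop pattern is matched against.
    iterable.into_iter().next()
}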

View file

@ -12,6 +12,7 @@ use crate::{
use super::{Expectation, InferenceContext};
impl InferenceContext<'_> {
// This function handles both closures and generators.
pub(super) fn deduce_closure_type_from_expectations(
&mut self,
closure_expr: ExprId,
@ -27,6 +28,11 @@ impl InferenceContext<'_> {
// Deduction from where-clauses in scope, as well as fn-pointer coercion are handled here.
let _ = self.coerce(Some(closure_expr), closure_ty, &expected_ty);
// Generators are not Fn* so return early.
if matches!(closure_ty.kind(Interner), TyKind::Generator(..)) {
return;
}
// Deduction based on the expected `dyn Fn` is done separately.
if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) {
if let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) {

View file

@ -3,7 +3,7 @@
//! like going from `&Vec<T>` to `&[T]`.
//!
//! See <https://doc.rust-lang.org/nomicon/coercions.html> and
//! `librustc_typeck/check/coercion.rs`.
//! `rustc_hir_analysis/check/coercion.rs`.
use std::{iter, sync::Arc};

View file

@ -10,7 +10,10 @@ use chalk_ir::{
cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind,
};
use hir_def::{
expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, LabelId, Literal, Statement, UnaryOp},
expr::{
ArithOp, Array, BinaryOp, ClosureKind, CmpOp, Expr, ExprId, LabelId, Literal, Statement,
UnaryOp,
},
generics::TypeOrConstParamData,
path::{GenericArg, GenericArgs},
resolver::resolver_for_expr,
@ -204,8 +207,10 @@ impl<'a> InferenceContext<'a> {
}
&Expr::For { iterable, body, pat, label } => {
let iterable_ty = self.infer_expr(iterable, &Expectation::none());
let pat_ty =
let into_iter_ty =
self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
let pat_ty =
self.resolve_associated_type(into_iter_ty, self.resolve_iterator_item());
self.infer_pat(pat, &pat_ty, BindingMode::default());
self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| {
@ -216,7 +221,7 @@ impl<'a> InferenceContext<'a> {
self.diverges = Diverges::Maybe;
TyBuilder::unit()
}
Expr::Closure { body, args, ret_type, arg_types } => {
Expr::Closure { body, args, ret_type, arg_types, closure_kind } => {
assert_eq!(args.len(), arg_types.len());
let mut sig_tys = Vec::new();
@ -244,20 +249,40 @@ impl<'a> InferenceContext<'a> {
),
})
.intern(Interner);
let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
let closure_ty =
TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone()))
.intern(Interner);
let (ty, resume_yield_tys) = if matches!(closure_kind, ClosureKind::Generator(_)) {
// FIXME: report error when there is more than one parameter.
let resume_ty = match sig_tys.first() {
// When `sig_tys.len() == 1` the first type is the return type, not the
// first parameter type.
Some(ty) if sig_tys.len() > 1 => ty.clone(),
_ => self.result.standard_types.unit.clone(),
};
let yield_ty = self.table.new_type_var();
let subst = TyBuilder::subst_for_generator(self.db, self.owner)
.push(resume_ty.clone())
.push(yield_ty.clone())
.push(ret_ty.clone())
.build();
let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into();
let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner);
(generator_ty, Some((resume_ty, yield_ty)))
} else {
let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
let closure_ty =
TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone()))
.intern(Interner);
(closure_ty, None)
};
// Eagerly try to relate the closure type with the expected
// type, otherwise we often won't have enough information to
// infer the body.
self.deduce_closure_type_from_expectations(
tgt_expr,
&closure_ty,
&sig_ty,
expected,
);
self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected);
// Now go through the argument patterns
for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
@ -266,6 +291,8 @@ impl<'a> InferenceContext<'a> {
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
let prev_resume_yield_tys =
mem::replace(&mut self.resume_yield_tys, resume_yield_tys);
self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| {
this.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
@ -273,8 +300,9 @@ impl<'a> InferenceContext<'a> {
self.diverges = prev_diverges;
self.return_ty = prev_ret_ty;
self.resume_yield_tys = prev_resume_yield_tys;
closure_ty
ty
}
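// Editorial sketch, not part of the diff: `sig_tys` holds the closure's
// parameter types followed by its return type, which is why the comment above
// treats `sig_tys.len() == 1` as "no parameters". The resume-type choice,
// extracted as a tiny standalone function over placeholder type names:
fn generator_resume_ty<'a>(sig_tys: &[&'a str], unit: &'a str) -> &'a str {
    match sig_tys.first() {
        // When `sig_tys.len() == 1` the only entry is the return type, so the
        // generator takes no resume argument and the resume type defaults to `()`.
        Some(&ty) if sig_tys.len() > 1 => ty,
        _ => unit,
    }
}
// generator_resume_ty(&["u32", "&str"], "()") == "u32"
// generator_resume_ty(&["&str"], "()") == "()"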
Expr::Call { callee, args, .. } => {
let callee_ty = self.infer_expr(*callee, &Expectation::none());
@ -423,11 +451,18 @@ impl<'a> InferenceContext<'a> {
TyKind::Never.intern(Interner)
}
Expr::Yield { expr } => {
// FIXME: track yield type for coercion
if let Some(expr) = expr {
self.infer_expr(*expr, &Expectation::none());
if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() {
if let Some(expr) = expr {
self.infer_expr_coerce(*expr, &Expectation::has_type(yield_ty));
} else {
let unit = self.result.standard_types.unit.clone();
let _ = self.coerce(Some(tgt_expr), &unit, &yield_ty);
}
resume_ty
} else {
// FIXME: report error (yield expr in non-generator)
TyKind::Error.intern(Interner)
}
TyKind::Never.intern(Interner)
}
Expr::RecordLit { path, fields, spread, .. } => {
let (ty, def_id) = self.resolve_variant(path.as_deref(), false);
@ -952,11 +987,13 @@ impl<'a> InferenceContext<'a> {
let lhs_ty = self.infer_expr(lhs, &lhs_expectation);
let rhs_ty = self.table.new_type_var();
let func = lang_names_for_bin_op(op).and_then(|(name, lang_item)| {
self.db.trait_data(self.resolve_lang_item(lang_item)?.as_trait()?).method_by_name(&name)
let trait_func = lang_names_for_bin_op(op).and_then(|(name, lang_item)| {
let trait_id = self.resolve_lang_item(lang_item)?.as_trait()?;
let func = self.db.trait_data(trait_id).method_by_name(&name)?;
Some((trait_id, func))
});
let func = match func {
Some(func) => func,
let (trait_, func) = match trait_func {
Some(it) => it,
None => {
let rhs_ty = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone());
let rhs_ty = self.infer_expr_coerce(rhs, &Expectation::from_option(rhs_ty));
@ -966,7 +1003,9 @@ impl<'a> InferenceContext<'a> {
}
};
let subst = TyBuilder::subst_for_def(self.db, func)
// HACK: We can use this substitution for the function because the function itself doesn't
// have its own generic parameters.
let subst = TyBuilder::subst_for_def(self.db, trait_, None)
.push(lhs_ty.clone())
.push(rhs_ty.clone())
.build();
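// Editorial sketch, not part of the diff: the HACK above relies on the
// `core::ops` binary-operator methods having no generic parameters of their
// own, so a substitution built for the trait is already complete for the
// method. The shape of trait this assumes (a stand-in, not the real
// `core::ops::Add`):
trait BinOp<Rhs = Self> {
    type Output;
    // no generics on the method itself, only `Self` and `Rhs` from the trait
    fn bin_op(self, rhs: Rhs) -> Self::Output;
}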
@ -1245,19 +1284,7 @@ impl<'a> InferenceContext<'a> {
assert_eq!(self_params, 0); // method shouldn't have another Self param
let total_len = parent_params + type_params + const_params + impl_trait_params;
let mut substs = Vec::with_capacity(total_len);
// Parent arguments are unknown
for (id, param) in def_generics.iter_parent() {
match param {
TypeOrConstParamData::TypeParamData(_) => {
substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner));
}
TypeOrConstParamData::ConstParamData(_) => {
let ty = self.db.const_param_ty(ConstParamId::from_unchecked(id));
substs
.push(GenericArgData::Const(self.table.new_const_var(ty)).intern(Interner));
}
}
}
// handle provided arguments
if let Some(generic_args) = generic_args {
// if args are provided, it should be all of them, but we can't rely on that
@ -1266,7 +1293,7 @@ impl<'a> InferenceContext<'a> {
.iter()
.filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
.take(type_params + const_params)
.zip(def_generics.iter_id().skip(parent_params))
.zip(def_generics.iter_id())
{
if let Some(g) = generic_arg_to_chalk(
self.db,
@ -1290,6 +1317,9 @@ impl<'a> InferenceContext<'a> {
}
}
};
// Handle everything else as unknown. This also handles generic arguments for the method's
// parent (impl or trait), which should come after those for the method.
for (id, data) in def_generics.iter().skip(substs.len()) {
match data {
TypeOrConstParamData::TypeParamData(_) => {
@ -1327,9 +1357,13 @@ impl<'a> InferenceContext<'a> {
CallableDefId::FunctionId(f) => {
if let ItemContainerId::TraitId(trait_) = f.lookup(self.db.upcast()).container {
// construct a TraitRef
let substs = crate::subst_prefix(
&*parameters,
generics(self.db.upcast(), trait_.into()).len(),
let params_len = parameters.len(Interner);
let trait_params_len = generics(self.db.upcast(), trait_.into()).len();
let substs = Substitution::from_iter(
Interner,
// The generic parameters for the trait come after those for the
// function.
&parameters.as_slice(Interner)[params_len - trait_params_len..],
);
self.push_obligation(
TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: substs }

View file

@ -12,8 +12,8 @@ use crate::{
builder::ParamKind,
consteval,
method_resolution::{self, VisibleFromModule},
GenericArgData, Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
ValueTyDefId,
utils::generics,
Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, ValueTyDefId,
};
use super::{ExprOrPatId, InferenceContext, TraitRef};
@ -96,17 +96,21 @@ impl<'a> InferenceContext<'a> {
ValueNs::GenericParam(it) => return Some(self.db.const_param_ty(it)),
};
let parent_substs = self_subst.unwrap_or_else(|| Substitution::empty(Interner));
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let substs = ctx.substs_from_path(path, typable, true);
let mut it = substs.as_slice(Interner)[parent_substs.len(Interner)..].iter().cloned();
let ty = TyBuilder::value_ty(self.db, typable)
.use_parent_substs(&parent_substs)
let substs = substs.as_slice(Interner);
let parent_substs = self_subst.or_else(|| {
let generics = generics(self.db.upcast(), typable.to_generic_def_id()?);
let parent_params_len = generics.parent_generics()?.len();
let parent_args = &substs[substs.len() - parent_params_len..];
Some(Substitution::from_iter(Interner, parent_args))
});
let parent_substs_len = parent_substs.as_ref().map_or(0, |s| s.len(Interner));
let mut it = substs.iter().take(substs.len() - parent_substs_len).cloned();
let ty = TyBuilder::value_ty(self.db, typable, parent_substs)
.fill(|x| {
it.next().unwrap_or_else(|| match x {
ParamKind::Type => {
GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
}
ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner),
ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()),
})
})
@ -249,7 +253,7 @@ impl<'a> InferenceContext<'a> {
};
let substs = match container {
ItemContainerId::ImplId(impl_id) => {
let impl_substs = TyBuilder::subst_for_def(self.db, impl_id)
let impl_substs = TyBuilder::subst_for_def(self.db, impl_id, None)
.fill_with_inference_vars(&mut self.table)
.build();
let impl_self_ty =

View file

@ -598,11 +598,14 @@ impl<'a> InferenceTable<'a> {
.build();
let projection = {
let b = TyBuilder::assoc_type_projection(self.db, output_assoc_type);
let b = TyBuilder::subst_for_def(self.db, fn_once_trait, None);
if b.remaining() != 2 {
return None;
}
b.push(ty.clone()).push(arg_ty).build()
let fn_once_subst = b.push(ty.clone()).push(arg_ty).build();
TyBuilder::assoc_type_projection(self.db, output_assoc_type, Some(fn_once_subst))
.build()
};
let trait_env = self.trait_env.env.clone();

View file

@ -306,7 +306,7 @@ impl<'a> TyLoweringContext<'a> {
// FIXME we're probably doing something wrong here
self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
let (
parent_params,
_parent_params,
self_params,
list_params,
const_params,
@ -319,7 +319,7 @@ impl<'a> TyLoweringContext<'a> {
};
TyKind::BoundVar(BoundVar::new(
self.in_binders,
idx as usize + parent_params + self_params + list_params + const_params,
idx as usize + self_params + list_params + const_params,
))
.intern(Interner)
}
@ -499,14 +499,31 @@ impl<'a> TyLoweringContext<'a> {
.intern(Interner)
}
TypeNs::SelfType(impl_id) => {
let generics = generics(self.db.upcast(), impl_id.into());
let substs = match self.type_param_mode {
ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
ParamLoweringMode::Variable => {
generics.bound_vars_subst(self.db, self.in_binders)
let def =
self.resolver.generic_def().expect("impl should have generic param scope");
let generics = generics(self.db.upcast(), def);
match self.type_param_mode {
ParamLoweringMode::Placeholder => {
// `def` can be either the impl itself or an item within it, and we need the
// impl itself here.
let generics = generics.parent_generics().unwrap_or(&generics);
let subst = generics.placeholder_subst(self.db);
self.db.impl_self_ty(impl_id).substitute(Interner, &subst)
}
};
self.db.impl_self_ty(impl_id).substitute(Interner, &substs)
ParamLoweringMode::Variable => {
let starting_from = match def {
GenericDefId::ImplId(_) => 0,
// `def` is an item within the impl. We need to substitute `BoundVar`s, but
// remember that they are for the parent (i.e. impl) generic params, so they
// come after our own params.
_ => generics.len_self(),
};
TyBuilder::impl_self_ty(self.db, impl_id)
.fill_with_bound_vars(self.in_binders, starting_from)
.build()
}
}
}
TypeNs::AdtSelfType(adt) => {
let generics = generics(self.db.upcast(), adt.into());
@ -663,40 +680,31 @@ impl<'a> TyLoweringContext<'a> {
fn substs_from_path_segment(
&self,
segment: PathSegment<'_>,
def_generic: Option<GenericDefId>,
def: Option<GenericDefId>,
infer_args: bool,
explicit_self_ty: Option<Ty>,
) -> Substitution {
// Remember that the item's own generic args come before its parent's.
let mut substs = Vec::new();
let def_generics = if let Some(def) = def_generic {
generics(self.db.upcast(), def)
let def = if let Some(d) = def {
d
} else {
return Substitution::empty(Interner);
};
let def_generics = generics(self.db.upcast(), def);
let (parent_params, self_params, type_params, const_params, impl_trait_params) =
def_generics.provenance_split();
let total_len =
parent_params + self_params + type_params + const_params + impl_trait_params;
let item_len = self_params + type_params + const_params + impl_trait_params;
let total_len = parent_params + item_len;
let ty_error = GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner);
let ty_error = TyKind::Error.intern(Interner).cast(Interner);
let mut def_generic_iter = def_generics.iter_id();
for _ in 0..parent_params {
if let Some(eid) = def_generic_iter.next() {
match eid {
Either::Left(_) => substs.push(ty_error.clone()),
Either::Right(x) => {
substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
}
}
}
}
let fill_self_params = || {
for x in explicit_self_ty
.into_iter()
.map(|x| GenericArgData::Ty(x).intern(Interner))
.map(|x| x.cast(Interner))
.chain(iter::repeat(ty_error.clone()))
.take(self_params)
{
@ -757,37 +765,40 @@ impl<'a> TyLoweringContext<'a> {
fill_self_params();
}
// These params include those of parent.
let remaining_params: SmallVec<[_; 2]> = def_generic_iter
.map(|eid| match eid {
Either::Left(_) => ty_error.clone(),
Either::Right(x) => unknown_const_as_generic(self.db.const_param_ty(x)),
})
.collect();
assert_eq!(remaining_params.len() + substs.len(), total_len);
// handle defaults. In expression or pattern path segments without
// explicitly specified type arguments, missing type arguments are inferred
// (i.e. defaults aren't used).
if !infer_args || had_explicit_args {
if let Some(def_generic) = def_generic {
let defaults = self.db.generic_defaults(def_generic);
assert_eq!(total_len, defaults.len());
let defaults = self.db.generic_defaults(def);
assert_eq!(total_len, defaults.len());
let parent_from = item_len - substs.len();
for default_ty in defaults.iter().skip(substs.len()) {
// each default can depend on the previous parameters
let substs_so_far = Substitution::from_iter(Interner, substs.clone());
if let Some(_id) = def_generic_iter.next() {
substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
}
}
for (idx, default_ty) in defaults[substs.len()..item_len].iter().enumerate() {
// each default can depend on the previous parameters
let substs_so_far = Substitution::from_iter(
Interner,
substs.iter().cloned().chain(remaining_params[idx..].iter().cloned()),
);
substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
}
// Keep parent's params as unknown.
let mut remaining_params = remaining_params;
substs.extend(remaining_params.drain(parent_from..));
} else {
substs.extend(remaining_params);
}
// add placeholders for args that were not provided
// FIXME: emit diagnostics in contexts where this is not allowed
for eid in def_generic_iter {
match eid {
Either::Left(_) => substs.push(ty_error.clone()),
Either::Right(x) => {
substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
}
}
}
// If this assert fails, it means you pushed into subst but didn't call .next() of def_generic_iter
assert_eq!(substs.len(), total_len);
Substitution::from_iter(Interner, substs)
}
@ -981,10 +992,11 @@ impl<'a> TyLoweringContext<'a> {
fn lower_dyn_trait(&self, bounds: &[Interned<TypeBound>]) -> Ty {
let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
// INVARIANT: The principal trait bound must come first. Others may be in any order but
// should be in the same order for the same set but possibly different order of bounds in
// the input.
// This invariant is used by `TyExt::dyn_trait()` and chalk.
// INVARIANT: The principal trait bound, if present, must come first. Others may be in any
// order, but the same set of bounds must always produce the same order, even if the bounds
// appear in a different order in the input.
// INVARIANT: If this function returns `DynTy`, there should be at least one trait bound.
// These invariants are utilized by `TyExt::dyn_trait()` and chalk.
let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
let mut bounds: Vec<_> = bounds
.iter()
@ -1035,6 +1047,12 @@ impl<'a> TyLoweringContext<'a> {
return None;
}
if bounds.first().and_then(|b| b.trait_id()).is_none() {
// When there's no trait bound, that's an error. This happens when the trait refs
// are unresolved.
return None;
}
// As multiple occurrences of the same auto traits *are* permitted, we deduplicate the
// bounds. We shouldn't have repeated elements besides auto traits at this point.
bounds.dedup();
@ -1046,7 +1064,8 @@ impl<'a> TyLoweringContext<'a> {
let bounds = crate::make_single_type_binders(bounds);
TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner)
} else {
// FIXME: report error (additional non-auto traits or associated type rebound)
// FIXME: report error
// (additional non-auto traits, associated type rebound, or no resolved trait)
TyKind::Error.intern(Interner)
}
}
@ -1139,11 +1158,28 @@ fn named_associated_type_shorthand_candidates<R>(
};
match res {
TypeNs::SelfType(impl_id) => search(
TypeNs::SelfType(impl_id) => {
// we're _in_ the impl -- the binders get added back later. Correct,
// but it would be nice to make this more explicit
db.impl_trait(impl_id)?.into_value_and_skipped_binders().0,
),
let trait_ref = db.impl_trait(impl_id)?.into_value_and_skipped_binders().0;
let impl_id_as_generic_def: GenericDefId = impl_id.into();
if impl_id_as_generic_def != def {
// `trait_ref` contains `BoundVar`s bound by impl's `Binders`, but here we need
// `BoundVar`s from `def`'s point of view.
// FIXME: A `HirDatabase` query may be handy if this process is needed in more
// places. It'd be almost identical to `impl_trait_query`, except the `resolver`
// would be that of `def` instead of `impl_id`.
let starting_idx = generics(db.upcast(), def).len_self();
let subst = TyBuilder::subst_for_def(db, impl_id, None)
.fill_with_bound_vars(DebruijnIndex::INNERMOST, starting_idx)
.build();
let trait_ref = subst.apply(trait_ref, Interner);
search(trait_ref)
} else {
search(trait_ref)
}
}
TypeNs::GenericParam(param_id) => {
let predicates = db.generic_predicates_for_param(def, param_id.into(), assoc_name);
let res = predicates.iter().find_map(|pred| match pred.skip_binders().skip_binders() {
@ -1160,10 +1196,18 @@ fn named_associated_type_shorthand_candidates<R>(
}
// Handle `Self::Type` referring to own associated type in trait definitions
if let GenericDefId::TraitId(trait_id) = param_id.parent() {
let generics = generics(db.upcast(), trait_id.into());
if generics.params.type_or_consts[param_id.local_id()].is_trait_self() {
let trait_generics = generics(db.upcast(), trait_id.into());
if trait_generics.params.type_or_consts[param_id.local_id()].is_trait_self() {
let def_generics = generics(db.upcast(), def);
let starting_idx = match def {
GenericDefId::TraitId(_) => 0,
// `def` is an item within the trait. We need to substitute `BoundVar`s, but
// remember that they are for the parent (i.e. trait) generic params, so they
// come after our own params.
_ => def_generics.len_self(),
};
let trait_ref = TyBuilder::trait_ref(db, trait_id)
.fill_with_bound_vars(DebruijnIndex::INNERMOST, 0)
.fill_with_bound_vars(DebruijnIndex::INNERMOST, starting_idx)
.build();
return search(trait_ref);
}
@ -1405,6 +1449,7 @@ pub(crate) fn generic_defaults_query(
let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
let generic_params = generics(db.upcast(), def);
let parent_start_idx = generic_params.len_self();
let defaults = generic_params
.iter()
@ -1417,19 +1462,17 @@ pub(crate) fn generic_defaults_query(
let val = unknown_const_as_generic(
db.const_param_ty(ConstParamId::from_unchecked(id)),
);
return crate::make_binders_with_count(db, idx, &generic_params, val);
return make_binders(db, &generic_params, val);
}
};
let mut ty =
p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
// Each default can only refer to previous parameters.
// type variable default referring to parameter coming
// after it. This is forbidden (FIXME: report
// diagnostic)
ty = fallback_bound_vars(ty, idx);
let val = GenericArgData::Ty(ty).intern(Interner);
crate::make_binders_with_count(db, idx, &generic_params, val)
// Type variable default referring to parameter coming
// after it is forbidden (FIXME: report diagnostic)
ty = fallback_bound_vars(ty, idx, parent_start_idx);
crate::make_binders(db, &generic_params, ty.cast(Interner))
})
.collect();
@ -1446,15 +1489,14 @@ pub(crate) fn generic_defaults_recover(
// we still need one default per parameter
let defaults = generic_params
.iter_id()
.enumerate()
.map(|(count, id)| {
.map(|id| {
let val = match id {
itertools::Either::Left(_) => {
GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
}
itertools::Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
};
crate::make_binders_with_count(db, count, &generic_params, val)
crate::make_binders(db, &generic_params, val)
})
.collect();
@ -1633,6 +1675,19 @@ pub enum ValueTyDefId {
}
impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
impl ValueTyDefId {
pub(crate) fn to_generic_def_id(self) -> Option<GenericDefId> {
match self {
Self::FunctionId(id) => Some(id.into()),
Self::StructId(id) => Some(id.into()),
Self::UnionId(id) => Some(id.into()),
Self::EnumVariantId(var) => Some(var.into()),
Self::ConstId(id) => Some(id.into()),
Self::StaticId(_) => None,
}
}
}
/// Build the declared type of an item. This depends on the namespace; e.g. for
/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
/// the constructor function `(usize) -> Foo` which lives in the values
@ -1816,26 +1871,48 @@ pub(crate) fn const_or_path_to_chalk(
}
}
/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past
/// num_vars_to_keep) by `TyKind::Unknown`.
/// Replaces any 'free' `BoundVar`s in `s` by `TyKind::Error` from the perspective of the
/// generic parameter whose index is `param_index`. A `BoundVar` is free when it is, or
/// (syntactically) appears after, the generic parameter at `param_index`.
fn fallback_bound_vars<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
s: T,
num_vars_to_keep: usize,
param_index: usize,
parent_start: usize,
) -> T {
// Keep in mind that parent generic parameters, if any, come *after* those of the item in
// question. In the diagrams below, `c*` and `p*` represent generic parameters of the item and
// its parent respectively.
let is_allowed = |index| {
if param_index < parent_start {
// The parameter at `param_index` is one of the item's own. Any parent generic
// parameters, or the item's generic parameters that come before `param_index`,
// are allowed.
// [c1, .., cj, .., ck, p1, .., pl] where cj is `param_index`
// ^^^^^^ ^^^^^^^^^^ these are allowed
!(param_index..parent_start).contains(&index)
} else {
// The parameter at `param_index` is one of the parent's. Only parent generic
// parameters that come before `param_index` are allowed.
// [c1, .., ck, p1, .., pj, .., pl] where pj is `param_index`
// ^^^^^^ these are allowed
(parent_start..param_index).contains(&index)
}
};
crate::fold_free_vars(
s,
|bound, binders| {
if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
TyKind::Error.intern(Interner)
} else {
if bound.index_if_innermost().map_or(true, is_allowed) {
bound.shifted_in_from(binders).to_ty(Interner)
} else {
TyKind::Error.intern(Interner)
}
},
|ty, bound, binders| {
if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
unknown_const(ty.clone())
} else {
if bound.index_if_innermost().map_or(true, is_allowed) {
bound.shifted_in_from(binders).to_const(Interner, ty)
} else {
unknown_const(ty.clone())
}
},
)
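// Editorial sketch, not part of the diff: the index rule from the `is_allowed`
// closure above, pulled out as a free function so the two cases can be checked
// in isolation. `param_index` and `parent_start` follow the layout described
// in the comments: the item's own parameters first, the parent's starting at
// `parent_start`.
fn is_allowed(index: usize, param_index: usize, parent_start: usize) -> bool {
    if param_index < parent_start {
        // Default of one of the item's own params: earlier own params and all
        // parent params may be referenced.
        !(param_index..parent_start).contains(&index)
    } else {
        // Default of a parent param: only earlier parent params may be referenced.
        (parent_start..param_index).contains(&index)
    }
}
// With layout [c0, c1, p2, p3] (parent_start == 2), the default of c1
// (param_index == 1) may refer to c0, p2 and p3, but not to c1 itself.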

View file

@ -103,6 +103,18 @@ impl From<crate::db::InternedClosureId> for chalk_ir::ClosureId<Interner> {
}
}
impl From<chalk_ir::GeneratorId<Interner>> for crate::db::InternedGeneratorId {
fn from(id: chalk_ir::GeneratorId<Interner>) -> Self {
Self::from_intern_id(id.0)
}
}
impl From<crate::db::InternedGeneratorId> for chalk_ir::GeneratorId<Interner> {
fn from(id: crate::db::InternedGeneratorId) -> Self {
chalk_ir::GeneratorId(id.as_intern_id())
}
}
pub fn to_foreign_def_id(id: TypeAliasId) -> ForeignDefId {
chalk_ir::ForeignDefId(salsa::InternKey::as_intern_id(&id))
}

View file

@ -1,7 +1,7 @@
//! This module is concerned with finding methods that a given type provides.
//! For details about how this works in rustc, see the method lookup page in the
//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs.
//! and the corresponding code mostly in rustc_hir_analysis/check/method/probe.rs.
use std::{iter, ops::ControlFlow, sync::Arc};
use arrayvec::ArrayVec;
@ -654,7 +654,7 @@ fn find_matching_impl(
let r = table.run_in_snapshot(|table| {
let impl_data = db.impl_data(impl_);
let substs =
TyBuilder::subst_for_def(db, impl_).fill_with_inference_vars(table).build();
TyBuilder::subst_for_def(db, impl_, None).fill_with_inference_vars(table).build();
let impl_ty = db.impl_self_ty(impl_).substitute(Interner, &substs);
table
@ -1147,10 +1147,9 @@ fn is_valid_candidate(
}));
if let ItemContainerId::ImplId(impl_id) = c.lookup(db.upcast()).container {
let self_ty_matches = table.run_in_snapshot(|table| {
let subst =
TyBuilder::subst_for_def(db, c).fill_with_inference_vars(table).build();
let expected_self_ty =
subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner);
let expected_self_ty = TyBuilder::impl_self_ty(db, impl_id)
.fill_with_inference_vars(table)
.build();
table.unify(&expected_self_ty, &self_ty)
});
if !self_ty_matches {
@ -1186,31 +1185,26 @@ fn is_valid_fn_candidate(
table.run_in_snapshot(|table| {
let container = fn_id.lookup(db.upcast()).container;
let impl_subst = match container {
let (impl_subst, expect_self_ty) = match container {
ItemContainerId::ImplId(it) => {
TyBuilder::subst_for_def(db, it).fill_with_inference_vars(table).build()
let subst =
TyBuilder::subst_for_def(db, it, None).fill_with_inference_vars(table).build();
let self_ty = db.impl_self_ty(it).substitute(Interner, &subst);
(subst, self_ty)
}
ItemContainerId::TraitId(it) => {
TyBuilder::subst_for_def(db, it).fill_with_inference_vars(table).build()
let subst =
TyBuilder::subst_for_def(db, it, None).fill_with_inference_vars(table).build();
let self_ty = subst.at(Interner, 0).assert_ty_ref(Interner).clone();
(subst, self_ty)
}
_ => unreachable!(),
};
let fn_subst = TyBuilder::subst_for_def(db, fn_id)
.use_parent_substs(&impl_subst)
let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone()))
.fill_with_inference_vars(table)
.build();
let expect_self_ty = match container {
ItemContainerId::TraitId(_) => fn_subst.at(Interner, 0).assert_ty_ref(Interner).clone(),
ItemContainerId::ImplId(impl_id) => {
fn_subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner)
}
// We should only get called for associated items (impl/trait)
ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
unreachable!()
}
};
check_that!(table.unify(&expect_self_ty, self_ty));
if let Some(receiver_ty) = receiver_ty {

View file

@ -16,7 +16,7 @@ use base_db::{fixture::WithFixture, FileRange, SourceDatabaseExt};
use expect_test::Expect;
use hir_def::{
body::{Body, BodySourceMap, SyntheticSyntax},
db::DefDatabase,
db::{DefDatabase, InternDatabase},
expr::{ExprId, PatId},
item_scope::ItemScope,
nameres::DefMap,
@ -135,6 +135,10 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::VariantId(it) => {
let loc = db.lookup_intern_enum(it.parent);
loc.source(&db).value.syntax().text_range().start()
}
});
let mut unexpected_type_mismatches = String::new();
for def in defs {
@ -388,6 +392,10 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::VariantId(it) => {
let loc = db.lookup_intern_enum(it.parent);
loc.source(&db).value.syntax().text_range().start()
}
});
for def in defs {
let (_body, source_map) = db.body_with_source_map(def);
@ -453,6 +461,18 @@ fn visit_module(
let body = db.body(def);
visit_body(db, &body, cb);
}
ModuleDefId::AdtId(hir_def::AdtId::EnumId(it)) => {
db.enum_data(it)
.variants
.iter()
.map(|(id, _)| hir_def::EnumVariantId { parent: it, local_id: id })
.for_each(|it| {
let def = it.into();
cb(def);
let body = db.body(def);
visit_body(db, &body, cb);
});
}
ModuleDefId::TraitId(it) => {
let trait_data = db.trait_data(it);
for &(_, item) in trait_data.items.iter() {

View file

@ -294,6 +294,24 @@ fn foo() {
);
}
#[test]
fn generator_yield_return_coerce() {
check_no_mismatches(
r#"
fn test() {
let g = || {
yield &1u32;
yield &&1u32;
if true {
return &1u32;
}
&&1u32
};
}
"#,
);
}
#[test]
fn assign_coerce() {
check_no_mismatches(

View file

@ -1488,7 +1488,6 @@ fn regression_11688_4() {
#[test]
fn gat_crash_1() {
cov_mark::check!(ignore_gats);
check_no_mismatches(
r#"
trait ATrait {}
@ -1527,30 +1526,22 @@ unsafe impl Storage for InlineStorage {
#[test]
fn gat_crash_3() {
// FIXME: This test currently crashes rust analyzer in a debug build but not in a
// release build (i.e. for the user). With the assumption that tests will always be run
// in debug mode, we catch the unwind and expect that it panicked. See the
// [`crate::utils::generics`] function for more information.
cov_mark::check!(ignore_gats);
std::panic::catch_unwind(|| {
check_no_mismatches(
r#"
check_no_mismatches(
r#"
trait Collection {
type Item;
type Member<T>: Collection<Item = T>;
fn add(&mut self, value: Self::Item) -> Result<(), Self::Error>;
type Item;
type Member<T>: Collection<Item = T>;
fn add(&mut self, value: Self::Item) -> Result<(), Self::Error>;
}
struct ConstGen<T, const N: usize> {
data: [T; N],
data: [T; N],
}
impl<T, const N: usize> Collection for ConstGen<T, N> {
type Item = T;
type Member<U> = ConstGen<U, N>;
type Item = T;
type Member<U> = ConstGen<U, N>;
}
"#,
);
})
.expect_err("must panic");
"#,
);
}
#[test]
@ -1691,3 +1682,28 @@ fn macrostmts() -> u8 {
"#,
);
}
#[test]
fn dyn_with_unresolved_trait() {
check_types(
r#"
fn foo(a: &dyn DoesNotExist) {
a.bar();
//^&{unknown}
}
"#,
);
}
#[test]
fn self_assoc_with_const_generics_crash() {
check_no_mismatches(
r#"
trait Trait { type Item; }
impl<T, const N: usize> Trait for [T; N] {
type Item = ();
fn f<U>(_: Self::Item) {}
}
"#,
);
}

View file

@ -1693,16 +1693,16 @@ fn infer_type_param() {
fn infer_const() {
check_infer(
r#"
struct Foo;
impl Foo { const ASSOC_CONST: u32 = 0; }
const GLOBAL_CONST: u32 = 101;
fn test() {
const LOCAL_CONST: u32 = 99;
let x = LOCAL_CONST;
let z = GLOBAL_CONST;
let id = Foo::ASSOC_CONST;
}
"#,
struct Foo;
impl Foo { const ASSOC_CONST: u32 = 0; }
const GLOBAL_CONST: u32 = 101;
fn test() {
const LOCAL_CONST: u32 = 99;
let x = LOCAL_CONST;
let z = GLOBAL_CONST;
let id = Foo::ASSOC_CONST;
}
"#,
expect![[r#"
48..49 '0': u32
79..82 '101': u32
@ -1722,17 +1722,17 @@ fn infer_const() {
fn infer_static() {
check_infer(
r#"
static GLOBAL_STATIC: u32 = 101;
static mut GLOBAL_STATIC_MUT: u32 = 101;
fn test() {
static LOCAL_STATIC: u32 = 99;
static mut LOCAL_STATIC_MUT: u32 = 99;
let x = LOCAL_STATIC;
let y = LOCAL_STATIC_MUT;
let z = GLOBAL_STATIC;
let w = GLOBAL_STATIC_MUT;
}
"#,
static GLOBAL_STATIC: u32 = 101;
static mut GLOBAL_STATIC_MUT: u32 = 101;
fn test() {
static LOCAL_STATIC: u32 = 99;
static mut LOCAL_STATIC_MUT: u32 = 99;
let x = LOCAL_STATIC;
let y = LOCAL_STATIC_MUT;
let z = GLOBAL_STATIC;
let w = GLOBAL_STATIC_MUT;
}
"#,
expect![[r#"
28..31 '101': u32
69..72 '101': u32
@ -1751,6 +1751,41 @@ fn infer_static() {
);
}
#[test]
fn infer_enum_variant() {
check_infer(
r#"
enum Foo {
A = 15,
B = Foo::A as isize + 1
}
"#,
expect![[r#"
19..21 '15': isize
31..37 'Foo::A': Foo
31..46 'Foo::A as isize': isize
31..50 'Foo::A...ze + 1': isize
49..50 '1': isize
"#]],
);
check_infer(
r#"
#[repr(u32)]
enum Foo {
A = 15,
B = Foo::A as u32 + 1
}
"#,
expect![[r#"
32..34 '15': u32
44..50 'Foo::A': Foo
44..57 'Foo::A as u32': u32
44..61 'Foo::A...32 + 1': u32
60..61 '1': u32
"#]],
);
}
#[test]
fn shadowing_primitive() {
check_types(
@ -1917,6 +1952,88 @@ fn closure_return_inferred() {
);
}
#[test]
fn generator_types_inferred() {
check_infer(
r#"
//- minicore: generator, deref
use core::ops::{Generator, GeneratorState};
use core::pin::Pin;
fn f(v: i64) {}
fn test() {
let mut g = |r| {
let a = yield 0;
let a = yield 1;
let a = yield 2;
"return value"
};
match Pin::new(&mut g).resume(0usize) {
GeneratorState::Yielded(y) => { f(y); }
GeneratorState::Complete(r) => {}
}
}
"#,
expect![[r#"
70..71 'v': i64
78..80 '{}': ()
91..362 '{ ... } }': ()
101..106 'mut g': |usize| yields i64 -> &str
109..218 '|r| { ... }': |usize| yields i64 -> &str
110..111 'r': usize
113..218 '{ ... }': &str
127..128 'a': usize
131..138 'yield 0': usize
137..138 '0': i64
152..153 'a': usize
156..163 'yield 1': usize
162..163 '1': i64
177..178 'a': usize
181..188 'yield 2': usize
187..188 '2': i64
198..212 '"return value"': &str
225..360 'match ... }': ()
231..239 'Pin::new': fn new<&mut |usize| yields i64 -> &str>(&mut |usize| yields i64 -> &str) -> Pin<&mut |usize| yields i64 -> &str>
231..247 'Pin::n...mut g)': Pin<&mut |usize| yields i64 -> &str>
231..262 'Pin::n...usize)': GeneratorState<i64, &str>
240..246 '&mut g': &mut |usize| yields i64 -> &str
245..246 'g': |usize| yields i64 -> &str
255..261 '0usize': usize
273..299 'Genera...ded(y)': GeneratorState<i64, &str>
297..298 'y': i64
303..312 '{ f(y); }': ()
305..306 'f': fn f(i64)
305..309 'f(y)': ()
307..308 'y': i64
321..348 'Genera...ete(r)': GeneratorState<i64, &str>
346..347 'r': &str
352..354 '{}': ()
"#]],
);
}
#[test]
fn generator_resume_yield_return_unit() {
check_no_mismatches(
r#"
//- minicore: generator, deref
use core::ops::{Generator, GeneratorState};
use core::pin::Pin;
fn test() {
let mut g = || {
let () = yield;
};
match Pin::new(&mut g).resume(()) {
GeneratorState::Yielded(()) => {}
GeneratorState::Complete(()) => {}
}
}
"#,
);
}
#[test]
fn fn_pointer_return() {
check_infer(

View file

@ -279,6 +279,10 @@ fn test() {
pub mod iter {
pub trait IntoIterator {
type Item;
type IntoIter: Iterator<Item = Self::Item>;
}
pub trait Iterator {
type Item;
}
}
pub mod prelude {
@ -297,7 +301,13 @@ pub mod collections {
}
impl<T> IntoIterator for Vec<T> {
type Item=T;
type Item = T;
type IntoIter = IntoIter<T>;
}
struct IntoIter<T> {}
impl<T> Iterator for IntoIter<T> {
type Item = T;
}
}
"#,

View file

@ -4,7 +4,7 @@
use std::iter;
use base_db::CrateId;
use chalk_ir::{fold::Shift, BoundVar, DebruijnIndex};
use chalk_ir::{cast::Cast, fold::Shift, BoundVar, DebruijnIndex};
use hir_def::{
db::DefDatabase,
generics::{
@ -24,8 +24,7 @@ use smallvec::{smallvec, SmallVec};
use syntax::SmolStr;
use crate::{
db::HirDatabase, ChalkTraitId, ConstData, ConstValue, GenericArgData, Interner, Substitution,
TraitRef, TraitRefExt, TyKind, WhereClause,
db::HirDatabase, ChalkTraitId, Interner, Substitution, TraitRef, TraitRefExt, WhereClause,
};
pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: CrateId) -> impl Iterator<Item = TraitId> {
@ -174,31 +173,6 @@ pub(super) fn associated_type_by_name_including_super_traits(
pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
if parent_generics.is_some() && matches!(def, GenericDefId::TypeAliasId(_)) {
let params = db.generic_params(def);
let parent_params = &parent_generics.as_ref().unwrap().params;
let has_consts =
params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_)));
let parent_has_consts =
parent_params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_)));
return if has_consts || parent_has_consts {
// XXX: treat const generic associated types as not existing to avoid crashes
// (#11769)
//
// Note: Also crashes when the parent has const generics (also even if the GAT
// doesn't use them), see `tests::regression::gat_crash_3` for an example.
// Avoids that by disabling GATs when the parent (i.e. `impl` block) has
// const generics (#12193).
//
// Chalk expects the inner associated type's parameters to come
// *before*, not after the trait's generics as we've always done it.
// Adapting to this requires a larger refactoring
cov_mark::hit!(ignore_gats);
Generics { def, params: Interned::new(Default::default()), parent_generics }
} else {
Generics { def, params, parent_generics }
};
}
Generics { def, params: db.generic_params(def), parent_generics }
}
@ -221,23 +195,30 @@ impl Generics {
})
}
/// Iterator over types and const params of parent, then self.
/// Iterator over types and const params of self, then parent.
pub(crate) fn iter<'a>(
&'a self,
) -> impl DoubleEndedIterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
let to_toc_id = |it: &'a Generics| {
move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p)
};
self.parent_generics()
.into_iter()
.flat_map(move |it| it.params.iter().map(to_toc_id(it)))
.chain(self.params.iter().map(to_toc_id(self)))
self.params.iter().map(to_toc_id(self)).chain(self.iter_parent())
}
/// Iterate over types and const params without parent params.
pub(crate) fn iter_self<'a>(
&'a self,
) -> impl DoubleEndedIterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
let to_toc_id = |it: &'a Generics| {
move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p)
};
self.params.iter().map(to_toc_id(self))
}
/// Iterator over types and const params of parent.
pub(crate) fn iter_parent<'a>(
&'a self,
) -> impl Iterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
) -> impl DoubleEndedIterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
self.parent_generics().into_iter().flat_map(|it| {
let to_toc_id =
move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p);
@ -245,12 +226,18 @@ impl Generics {
})
}
/// Returns the total number of generic parameters in scope, including those from the parent.
pub(crate) fn len(&self) -> usize {
let parent = self.parent_generics().map_or(0, Generics::len);
let child = self.params.type_or_consts.len();
parent + child
}
/// Returns the number of generic parameters, excluding those from the parent.
pub(crate) fn len_self(&self) -> usize {
self.params.type_or_consts.len()
}
/// (parent total, self param, type param list, const param list, impl trait)
pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize) {
let ty_iter = || self.params.iter().filter_map(|x| x.1.type_param());
@ -275,15 +262,17 @@ impl Generics {
if param.parent == self.def {
let (idx, (_local_id, data)) =
self.params.iter().enumerate().find(|(_, (idx, _))| *idx == param.local_id)?;
let parent_len = self.parent_generics().map_or(0, Generics::len);
Some((parent_len + idx, data))
Some((idx, data))
} else {
self.parent_generics().and_then(|g| g.find_param(param))
self.parent_generics()
.and_then(|g| g.find_param(param))
// Remember that parent parameters come after parameters for self.
.map(|(idx, data)| (self.len_self() + idx, data))
}
}
fn parent_generics(&self) -> Option<&Generics> {
self.parent_generics.as_ref().map(|it| &**it)
pub(crate) fn parent_generics(&self) -> Option<&Generics> {
self.parent_generics.as_deref()
}
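// Editorial sketch, not part of the diff: a toy model of the new index layout
// that `iter`, `len_self` and `find_param` implement, with the item's own
// parameters first and the parent's after them:
struct ToyGenerics {
    own: Vec<&'static str>,
    parent: Option<Box<ToyGenerics>>,
}

impl ToyGenerics {
    fn len_self(&self) -> usize {
        self.own.len()
    }
    fn find(&self, name: &str) -> Option<usize> {
        if let Some(i) = self.own.iter().position(|&n| n == name) {
            return Some(i); // own params occupy indices 0..len_self()
        }
        // parent params come after ours, so offset their indices by len_self()
        self.parent.as_ref()?.find(name).map(|i| self.len_self() + i)
    }
}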
/// Returns a Substitution that replaces each parameter by a bound variable.
@ -295,18 +284,10 @@ impl Generics {
Substitution::from_iter(
Interner,
self.iter_id().enumerate().map(|(idx, id)| match id {
Either::Left(_) => GenericArgData::Ty(
TyKind::BoundVar(BoundVar::new(debruijn, idx)).intern(Interner),
)
.intern(Interner),
Either::Right(id) => GenericArgData::Const(
ConstData {
value: ConstValue::BoundVar(BoundVar::new(debruijn, idx)),
ty: db.const_param_ty(id),
}
.intern(Interner),
)
.intern(Interner),
Either::Left(_) => BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner),
Either::Right(id) => BoundVar::new(debruijn, idx)
.to_const(Interner, db.const_param_ty(id))
.cast(Interner),
}),
)
}
@ -316,18 +297,12 @@ impl Generics {
Substitution::from_iter(
Interner,
self.iter_id().map(|id| match id {
Either::Left(id) => GenericArgData::Ty(
TyKind::Placeholder(crate::to_placeholder_idx(db, id.into())).intern(Interner),
)
.intern(Interner),
Either::Right(id) => GenericArgData::Const(
ConstData {
value: ConstValue::Placeholder(crate::to_placeholder_idx(db, id.into())),
ty: db.const_param_ty(id),
}
.intern(Interner),
)
.intern(Interner),
Either::Left(id) => {
crate::to_placeholder_idx(db, id.into()).to_ty(Interner).cast(Interner)
}
Either::Right(id) => crate::to_placeholder_idx(db, id.into())
.to_const(Interner, db.const_param_ty(id))
.cast(Interner),
}),
)
}

View file

@ -1,7 +1,7 @@
//! Re-export diagnostics such that clients of `hir` don't have to depend on
//! low-level crates.
//!
//! This probably isn't the best way to do this -- ideally, diagnistics should
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};

View file

@ -492,6 +492,9 @@ impl HirDisplay for TypeAlias {
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
let data = f.db.type_alias_data(self.id);
write!(f, "type {}", data.name)?;
let def_id = GenericDefId::TypeAliasId(self.id);
write_generic_params(def_id, f)?;
write_where_clause(def_id, f)?;
if !data.bounds.is_empty() {
f.write_str(": ")?;
f.write_joined(&data.bounds, " + ")?;

View file

@ -140,6 +140,7 @@ impl From<DefWithBody> for DefWithBodyId {
DefWithBody::Function(it) => DefWithBodyId::FunctionId(it.id),
DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id),
DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id),
DefWithBody::Variant(it) => DefWithBodyId::VariantId(it.into()),
}
}
}
@ -150,6 +151,7 @@ impl From<DefWithBodyId> for DefWithBody {
DefWithBodyId::FunctionId(it) => DefWithBody::Function(it.into()),
DefWithBodyId::StaticId(it) => DefWithBody::Static(it.into()),
DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()),
DefWithBodyId::VariantId(it) => DefWithBody::Variant(it.into()),
}
}
}
@ -172,9 +174,7 @@ impl From<GenericDef> for GenericDefId {
GenericDef::Trait(it) => GenericDefId::TraitId(it.id),
GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id),
GenericDef::Impl(it) => GenericDefId::ImplId(it.id),
GenericDef::Variant(it) => {
GenericDefId::EnumVariantId(EnumVariantId { parent: it.parent.id, local_id: it.id })
}
GenericDef::Variant(it) => GenericDefId::EnumVariantId(it.into()),
GenericDef::Const(it) => GenericDefId::ConstId(it.id),
}
}
@ -188,9 +188,7 @@ impl From<GenericDefId> for GenericDef {
GenericDefId::TraitId(it) => GenericDef::Trait(it.into()),
GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
GenericDefId::ImplId(it) => GenericDef::Impl(it.into()),
GenericDefId::EnumVariantId(it) => {
GenericDef::Variant(Variant { parent: it.parent.into(), id: it.local_id })
}
GenericDefId::EnumVariantId(it) => GenericDef::Variant(it.into()),
GenericDefId::ConstId(it) => GenericDef::Const(it.into()),
}
}

View file

@ -39,7 +39,7 @@ use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind};
use either::Either;
use hir_def::{
adt::{ReprKind, VariantData},
adt::{ReprData, VariantData},
body::{BodyDiagnostic, SyntheticSyntax},
expr::{BindingAnnotation, LabelId, Pat, PatId},
generics::{TypeOrConstParamData, TypeParamProvenance},
@ -50,7 +50,7 @@ use hir_def::{
resolver::{HasResolver, Resolver},
src::HasSource as _,
AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
};
@ -61,7 +61,6 @@ use hir_ty::{
diagnostics::BodyValidationDiagnostic,
method_resolution::{self, TyFingerprint},
primitive::UintTy,
subst_prefix,
traits::FnTrait,
AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId,
GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
@ -73,7 +72,7 @@ use once_cell::unsync::Lazy;
use rustc_hash::FxHashSet;
use stdx::{impl_from, never};
use syntax::{
ast::{self, HasAttrs as _, HasDocComments, HasName},
ast::{self, Expr, HasAttrs as _, HasDocComments, HasName},
AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T,
};
@ -348,7 +347,10 @@ impl ModuleDef {
ModuleDef::Module(it) => it.id.into(),
ModuleDef::Const(it) => it.id.into(),
ModuleDef::Static(it) => it.id.into(),
_ => return Vec::new(),
ModuleDef::Variant(it) => {
EnumVariantId { parent: it.parent.into(), local_id: it.id }.into()
}
ModuleDef::BuiltinType(_) | ModuleDef::Macro(_) => return Vec::new(),
};
let module = match self.module(db) {
@ -377,10 +379,10 @@ impl ModuleDef {
ModuleDef::Function(it) => Some(it.into()),
ModuleDef::Const(it) => Some(it.into()),
ModuleDef::Static(it) => Some(it.into()),
ModuleDef::Variant(it) => Some(it.into()),
ModuleDef::Module(_)
| ModuleDef::Adt(_)
| ModuleDef::Variant(_)
| ModuleDef::Trait(_)
| ModuleDef::TypeAlias(_)
| ModuleDef::Macro(_)
@ -537,6 +539,30 @@ impl Module {
}
acc.extend(decl.diagnostics(db))
}
ModuleDef::Adt(adt) => {
match adt {
Adt::Struct(s) => {
for diag in db.struct_data_with_diagnostics(s.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
}
Adt::Union(u) => {
for diag in db.union_data_with_diagnostics(u.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
}
Adt::Enum(e) => {
for v in e.variants(db) {
acc.extend(ModuleDef::Variant(v).diagnostics(db));
}
for diag in db.enum_data_with_diagnostics(e.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
}
}
acc.extend(decl.diagnostics(db))
}
_ => acc.extend(decl.diagnostics(db)),
}
}
@ -874,7 +900,7 @@ impl Struct {
Type::from_def(db, self.id)
}
pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprKind> {
pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprData> {
db.struct_data(self.id).repr.clone()
}
@ -952,6 +978,21 @@ impl Enum {
pub fn ty(self, db: &dyn HirDatabase) -> Type {
Type::from_def(db, self.id)
}
/// The type of the enum variant bodies.
pub fn variant_body_ty(self, db: &dyn HirDatabase) -> Type {
Type::new_for_crate(
self.id.lookup(db.upcast()).container.krate(),
TyBuilder::builtin(match db.enum_data(self.id).variant_body_type() {
Either::Left(builtin) => hir_def::builtin_type::BuiltinType::Int(builtin),
Either::Right(builtin) => hir_def::builtin_type::BuiltinType::Uint(builtin),
}),
)
}
pub fn is_data_carrying(self, db: &dyn HirDatabase) -> bool {
self.variants(db).iter().any(|v| !matches!(v.kind(db), StructKind::Unit))
}
}
impl HasVisibility for Enum {
@ -960,6 +1001,12 @@ impl HasVisibility for Enum {
}
}
impl From<&Variant> for DefWithBodyId {
fn from(&v: &Variant) -> Self {
DefWithBodyId::VariantId(v.into())
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Variant {
pub(crate) parent: Enum,
@ -994,6 +1041,14 @@ impl Variant {
pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
db.enum_data(self.parent.id).variants[self.id].variant_data.clone()
}
pub fn value(self, db: &dyn HirDatabase) -> Option<Expr> {
self.source(db)?.value.expr()
}
pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError> {
db.const_eval_variant(self.into())
}
}
/// Variants inherit visibility from the parent enum.
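
Note: the `Enum::variant_body_ty`, `Variant::value`, and `Variant::eval` additions above build on ordinary Rust discriminant semantics. A minimal standalone sketch of those semantics (plain Rust, not rust-analyzer code):

#[repr(u8)]
enum E {
    A = 1 << 3, // explicit discriminant: a const expression typed at the repr type (u8 here)
    B,          // unspecified discriminant: previous + 1, i.e. 9
}

fn main() {
    assert_eq!(E::A as u8, 8);
    assert_eq!(E::B as u8, 9);
}
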
@ -1034,7 +1089,7 @@ impl Adt {
pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type {
let id = AdtId::from(self);
let mut it = args.iter().map(|t| t.ty.clone());
let ty = TyBuilder::def_ty(db, id.into())
let ty = TyBuilder::def_ty(db, id.into(), None)
.fill(|x| {
let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
match x {
@ -1129,8 +1184,9 @@ pub enum DefWithBody {
Function(Function),
Static(Static),
Const(Const),
Variant(Variant),
}
impl_from!(Function, Const, Static for DefWithBody);
impl_from!(Function, Const, Static, Variant for DefWithBody);
impl DefWithBody {
pub fn module(self, db: &dyn HirDatabase) -> Module {
@ -1138,6 +1194,7 @@ impl DefWithBody {
DefWithBody::Const(c) => c.module(db),
DefWithBody::Function(f) => f.module(db),
DefWithBody::Static(s) => s.module(db),
DefWithBody::Variant(v) => v.module(db),
}
}
@ -1146,6 +1203,7 @@ impl DefWithBody {
DefWithBody::Function(f) => Some(f.name(db)),
DefWithBody::Static(s) => Some(s.name(db)),
DefWithBody::Const(c) => c.name(db),
DefWithBody::Variant(v) => Some(v.name(db)),
}
}
@ -1155,6 +1213,7 @@ impl DefWithBody {
DefWithBody::Function(it) => it.ret_type(db),
DefWithBody::Static(it) => it.ty(db),
DefWithBody::Const(it) => it.ty(db),
DefWithBody::Variant(it) => it.parent.variant_body_ty(db),
}
}
@ -1163,6 +1222,7 @@ impl DefWithBody {
DefWithBody::Function(it) => it.id.into(),
DefWithBody::Static(it) => it.id.into(),
DefWithBody::Const(it) => it.id.into(),
DefWithBody::Variant(it) => it.into(),
}
}
@ -1379,6 +1439,7 @@ impl DefWithBody {
DefWithBody::Function(it) => it.into(),
DefWithBody::Static(it) => it.into(),
DefWithBody::Const(it) => it.into(),
DefWithBody::Variant(it) => it.into(),
};
for diag in hir_ty::diagnostics::incorrect_case(db, krate, def.into()) {
acc.push(diag.into())
@ -2485,7 +2546,7 @@ impl TypeParam {
let resolver = self.id.parent().resolver(db.upcast());
let ty = params.get(local_idx)?.clone();
let subst = TyBuilder::placeholder_subst(db, self.id.parent());
let ty = ty.substitute(Interner, &subst_prefix(&subst, local_idx));
let ty = ty.substitute(Interner, &subst);
match ty.data(Interner) {
GenericArgData::Ty(x) => Some(Type::new_with_resolver_inner(db, &resolver, x.clone())),
_ => None,
@ -2739,7 +2800,22 @@ impl Type {
}
fn from_def(db: &dyn HirDatabase, def: impl HasResolver + Into<TyDefId>) -> Type {
let ty = TyBuilder::def_ty(db, def.into()).fill_with_unknown().build();
let ty_def = def.into();
let parent_subst = match ty_def {
TyDefId::TypeAliasId(id) => match id.lookup(db.upcast()).container {
ItemContainerId::TraitId(id) => {
let subst = TyBuilder::subst_for_def(db, id, None).fill_with_unknown().build();
Some(subst)
}
ItemContainerId::ImplId(id) => {
let subst = TyBuilder::subst_for_def(db, id, None).fill_with_unknown().build();
Some(subst)
}
_ => None,
},
_ => None,
};
let ty = TyBuilder::def_ty(db, ty_def, parent_subst).fill_with_unknown().build();
Type::new(db, def, ty)
}
@ -2879,7 +2955,11 @@ impl Type {
alias: TypeAlias,
) -> Option<Type> {
let mut args = args.iter();
let projection = TyBuilder::assoc_type_projection(db, alias.id)
let trait_id = match alias.id.lookup(db.upcast()).container {
ItemContainerId::TraitId(id) => id,
_ => unreachable!("non assoc type alias reached in normalize_trait_assoc_type()"),
};
let parent_subst = TyBuilder::subst_for_def(db, trait_id, None)
.push(self.ty.clone())
.fill(|x| {
// FIXME: this code is not covered in tests.
@ -2891,6 +2971,8 @@ impl Type {
}
})
.build();
// FIXME: We don't handle GATs yet.
let projection = TyBuilder::assoc_type_projection(db, alias.id, Some(parent_subst)).build();
let ty = db.normalize_projection(projection, self.env.clone());
if ty.is_unknown() {
@ -2940,7 +3022,7 @@ impl Type {
let adt = adt_id.into();
match adt {
Adt::Struct(s) => matches!(s.repr(db), Some(ReprKind::Packed)),
Adt::Struct(s) => matches!(s.repr(db), Some(ReprData { packed: true, .. })),
_ => false,
}
}
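
Note on the `parent_subst` handling added to `Type::from_def` and `normalize_trait_assoc_type` above: a type alias declared inside a trait or impl may mention its container's generic parameters, so the container's substitution has to be supplied (here filled with unknowns) before the alias itself can be lowered. A plain-Rust illustration of why (not rust-analyzer code):

struct Wrapper<T>(T);

trait HasAssoc {
    type Assoc;
}

impl<T> HasAssoc for Wrapper<T> {
    // The right-hand side refers to the impl's `T`, so resolving this alias needs a
    // substitution for the container's generics, not just the alias's own.
    type Assoc = Option<T>;
}

fn main() {
    let _x: <Wrapper<u8> as HasAssoc>::Assoc = Some(1u8);
}
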

View file

@ -257,6 +257,11 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
self.imp.original_ast_node(node)
}
/// Attempts to map the node out of macro expanded files.
/// This only works for attribute expansions, as other kinds of expansion do not have nodes as input.
pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
self.imp.original_syntax_node(node)
}
pub fn diagnostics_display_range(&self, diagnostics: InFile<SyntaxNodePtr>) -> FileRange {
self.imp.diagnostics_display_range(diagnostics)
@ -956,6 +961,16 @@ impl<'db> SemanticsImpl<'db> {
)
}
fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
let InFile { file_id, .. } = self.find_file(node);
InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
|InFile { file_id, value }| {
self.cache(find_root(&value), file_id);
value
},
)
}
fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
let root = self.parse_or_expand(src.file_id).unwrap();
let node = src.map(|it| it.to_node(&root));
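
Note: the limitation documented on `original_syntax_node` comes from what each kind of macro receives as input. A small standalone illustration (ordinary Rust; a derive stands in for an attribute expansion here, since both take the annotated item as their input):

#[derive(Debug)] // attribute-style expansion: the input is the item below, a real syntax node
struct FromAttribute;

macro_rules! make_item {
    () => { struct FromTokens; }; // function-like expansion: the input is just `()`, no item node
}
make_item!();

fn main() {
    println!("{:?}", FromAttribute);
    let _ = FromTokens;
}
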

View file

@ -115,7 +115,7 @@ pub(super) struct SourceToDefCtx<'a, 'b> {
}
impl SourceToDefCtx<'_, '_> {
pub(super) fn file_to_def(&mut self, file: FileId) -> SmallVec<[ModuleId; 1]> {
pub(super) fn file_to_def(&self, file: FileId) -> SmallVec<[ModuleId; 1]> {
let _p = profile::span("SourceBinder::to_module_def");
let mut mods = SmallVec::new();
for &crate_id in self.db.relevant_crates(file).iter() {
@ -130,7 +130,7 @@ impl SourceToDefCtx<'_, '_> {
mods
}
pub(super) fn module_to_def(&mut self, src: InFile<ast::Module>) -> Option<ModuleId> {
pub(super) fn module_to_def(&self, src: InFile<ast::Module>) -> Option<ModuleId> {
let _p = profile::span("module_to_def");
let parent_declaration = src
.syntax()
@ -151,7 +151,7 @@ impl SourceToDefCtx<'_, '_> {
Some(def_map.module_id(child_id))
}
pub(super) fn source_file_to_def(&mut self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
pub(super) fn source_file_to_def(&self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
let _p = profile::span("source_file_to_def");
let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).get(0).copied()
@ -384,7 +384,7 @@ impl SourceToDefCtx<'_, '_> {
} else {
let it = ast::Variant::cast(container.value)?;
let def = self.enum_variant_to_def(InFile::new(container.file_id, it))?;
VariantId::from(def).into()
DefWithBodyId::from(def).into()
};
Some(cont)
}

View file

@ -22,7 +22,7 @@ use hir_def::{
resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
type_ref::Mutability,
AsMacroCall, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, LocalFieldId,
Lookup, ModuleDefId, VariantId,
Lookup, ModuleDefId, TraitId, VariantId,
};
use hir_expand::{
builtin_fn_macro::BuiltinFnLikeExpander,
@ -302,10 +302,15 @@ impl SourceAnalyzer {
}
}
let future_trait = db
.lang_item(self.resolver.krate(), hir_expand::name![future_trait].to_smol_str())?
.as_trait()?;
let poll_fn = db
.lang_item(self.resolver.krate(), hir_expand::name![poll].to_smol_str())?
.as_function()?;
let substs = hir_ty::TyBuilder::subst_for_def(db, poll_fn).push(ty.clone()).build();
// HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself
// doesn't have any generic parameters, so we skip building another subst for `poll()`.
let substs = hir_ty::TyBuilder::subst_for_def(db, future_trait, None).push(ty).build();
Some(self.resolve_impl_method_or_trait_def(db, poll_fn, &substs))
}
@ -321,8 +326,10 @@ impl SourceAnalyzer {
};
let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?;
let op_fn = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build();
let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
// HACK: subst for all methods coincides with that for their trait because the methods
// don't have any generic parameters, so we skip building another subst for the methods.
let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
}
@ -337,8 +344,10 @@ impl SourceAnalyzer {
let lang_item_name = name![index];
let op_fn = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn)
let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
// HACK: subst for all methods coincides with that for their trait because the methods
// don't have any generic parameters, so we skip building another subst for the methods.
let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None)
.push(base_ty.clone())
.push(index_ty.clone())
.build();
@ -354,10 +363,14 @@ impl SourceAnalyzer {
let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?;
let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?;
let op_fn = lang_names_for_bin_op(op)
let (op_trait, op_fn) = lang_names_for_bin_op(op)
.and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?;
let substs =
hir_ty::TyBuilder::subst_for_def(db, op_fn).push(lhs.clone()).push(rhs.clone()).build();
// HACK: subst for the operator method coincides with that for its trait because the method
// itself doesn't have any generic parameters, so we skip building another subst for it.
let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None)
.push(lhs.clone())
.push(rhs.clone())
.build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
}
@ -371,7 +384,13 @@ impl SourceAnalyzer {
let op_fn =
db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?;
let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build();
let op_trait = match op_fn.lookup(db.upcast()).container {
ItemContainerId::TraitId(id) => id,
_ => return None,
};
// HACK: subst for `branch()` coincides with that for `Try` because `branch()` itself
// doesn't have any generic parameters, so we skip building another subst for `branch()`.
let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
}
@ -799,9 +818,10 @@ impl SourceAnalyzer {
db: &dyn HirDatabase,
lang_trait: &Name,
method_name: &Name,
) -> Option<FunctionId> {
db.trait_data(db.lang_item(self.resolver.krate(), lang_trait.to_smol_str())?.as_trait()?)
.method_by_name(method_name)
) -> Option<(TraitId, FunctionId)> {
let trait_id = db.lang_item(self.resolver.krate(), lang_trait.to_smol_str())?.as_trait()?;
let fn_id = db.trait_data(trait_id).method_by_name(method_name)?;
Some((trait_id, fn_id))
}
fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> {
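
Note: the HACK comments in this file all rest on the same shape: the lang-item methods being resolved (`poll`, the unary and binary operator methods, `index`, `branch`) declare no generic parameters of their own, so a substitution built for the trait already determines the method call. A self-contained sketch of that shape (the trait and names below are made up for illustration):

// Hypothetical trait with the same shape as the lang-item traits used above:
// all generics live on the trait, none on the method.
trait MyIndex<Idx> {
    type Output;
    fn my_index(&self, index: Idx) -> &Self::Output; // no method-level generics
}

impl MyIndex<usize> for Vec<i32> {
    type Output = i32;
    fn my_index(&self, index: usize) -> &i32 {
        &self[index]
    }
}

fn main() {
    let v = vec![1, 2, 3];
    // Choosing `Self = Vec<i32>` and `Idx = usize` fully determines the call:
    // there is nothing left to substitute for `my_index` itself.
    assert_eq!(*v.my_index(1), 2);
}
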

View file

@ -244,6 +244,10 @@ impl<'a> SymbolCollector<'a> {
DefWithBodyId::ConstId(id) => Some(
id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
),
DefWithBodyId::VariantId(id) => Some({
let db = self.db.upcast();
id.parent.lookup(db).source(db).value.name()?.text().into()
}),
}
}

View file

@ -7,6 +7,7 @@ use ide_db::{
},
};
use itertools::Itertools;
use stdx::format_to;
use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange};
// Assist: move_format_string_arg
@ -78,20 +79,26 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>)
// Extract existing arguments in macro
let tokens =
tt.token_trees_and_tokens().filter_map(NodeOrToken::into_token).collect_vec();
tt.token_trees_and_tokens().collect_vec();
let mut existing_args: Vec<String> = vec![];
let mut current_arg = String::new();
if let [_opening_bracket, format_string, _args_start_comma, tokens @ .., end_bracket] =
if let [_opening_bracket, NodeOrToken::Token(format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(end_bracket)] =
tokens.as_slice()
{
for t in tokens {
if t.kind() == COMMA {
existing_args.push(current_arg.trim().into());
current_arg.clear();
} else {
current_arg.push_str(t.text());
match t {
NodeOrToken::Node(n) => {
format_to!(current_arg, "{n}");
},
NodeOrToken::Token(t) if t.kind() == COMMA=> {
existing_args.push(current_arg.trim().into());
current_arg.clear();
},
NodeOrToken::Token(t) => {
current_arg.push_str(t.text());
},
}
}
existing_args.push(current_arg.trim().into());
@ -261,6 +268,27 @@ fn main() {
fn main() {
print!("{} {:b} {}"$0, 1, x + 1, Struct(1, 2));
}
"#,
),
);
}
#[test]
fn nested_tt() {
check_assist(
move_format_string_arg,
&add_macro_decl(
r#"
fn main() {
print!("My name is {} {x$0 + x}", stringify!(Paperino))
}
"#,
),
&add_macro_decl(
r#"
fn main() {
print!("My name is {} {}"$0, stringify!(Paperino), x + x)
}
"#,
),
);
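
Note: switching from a token-only iterator to `NodeOrToken` is what keeps nested token trees such as `stringify!(Paperino)` intact: when the existing macro arguments are re-split on `,`, a comma inside a nested tree must not start a new argument. A simplified, string-based splitter showing the same rule (illustration only; the real code walks syntax trees):

// Splits on commas at nesting depth 0 only, so "foo!(a, b)" stays one argument.
fn split_args(input: &str) -> Vec<String> {
    let mut depth = 0i32;
    let mut current = String::new();
    let mut out = Vec::new();
    for ch in input.chars() {
        match ch {
            '(' | '[' | '{' => { depth += 1; current.push(ch); }
            ')' | ']' | '}' => { depth -= 1; current.push(ch); }
            ',' if depth == 0 => { out.push(current.trim().to_string()); current.clear(); }
            _ => current.push(ch),
        }
    }
    out.push(current.trim().to_string());
    out
}

fn main() {
    assert_eq!(split_args("1, foo!(a, b), x + x"), ["1", "foo!(a, b)", "x + x"]);
}
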

View file

@ -0,0 +1,159 @@
use syntax::{
ast::{self, edit::AstNodeEdit},
AstNode, T,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
// Assist: unwrap_tuple
//
// Unwrap the tuple to different variables.
//
// ```
// # //- minicore: result
// fn main() {
// $0let (foo, bar) = ("Foo", "Bar");
// }
// ```
// ->
// ```
// fn main() {
// let foo = "Foo";
// let bar = "Bar";
// }
// ```
pub(crate) fn unwrap_tuple(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let let_kw = ctx.find_token_syntax_at_offset(T![let])?;
let let_stmt = let_kw.parent().and_then(ast::LetStmt::cast)?;
let indent_level = let_stmt.indent_level().0 as usize;
let pat = let_stmt.pat()?;
let ty = let_stmt.ty();
let init = let_stmt.initializer()?;
// This only applies for tuple patterns, types, and initializers.
let tuple_pat = match pat {
ast::Pat::TuplePat(pat) => pat,
_ => return None,
};
let tuple_ty = ty.and_then(|it| match it {
ast::Type::TupleType(ty) => Some(ty),
_ => None,
});
let tuple_init = match init {
ast::Expr::TupleExpr(expr) => expr,
_ => return None,
};
if tuple_pat.fields().count() != tuple_init.fields().count() {
return None;
}
if let Some(tys) = &tuple_ty {
if tuple_pat.fields().count() != tys.fields().count() {
return None;
}
}
let parent = let_kw.parent()?;
acc.add(
AssistId("unwrap_tuple", AssistKind::RefactorRewrite),
"Unwrap tuple",
let_kw.text_range(),
|edit| {
let indents = " ".repeat(indent_level);
// If there is an ascribed type, insert that type for each declaration,
// otherwise, omit that type.
if let Some(tys) = tuple_ty {
let mut zipped_decls = String::new();
for (pat, ty, expr) in
itertools::izip!(tuple_pat.fields(), tys.fields(), tuple_init.fields())
{
zipped_decls.push_str(&format!("{}let {pat}: {ty} = {expr};\n", indents))
}
edit.replace(parent.text_range(), zipped_decls.trim());
} else {
let mut zipped_decls = String::new();
for (pat, expr) in itertools::izip!(tuple_pat.fields(), tuple_init.fields()) {
zipped_decls.push_str(&format!("{}let {pat} = {expr};\n", indents));
}
edit.replace(parent.text_range(), zipped_decls.trim());
}
},
)
}
#[cfg(test)]
mod tests {
use crate::tests::check_assist;
use super::*;
#[test]
fn unwrap_tuples() {
check_assist(
unwrap_tuple,
r#"
fn main() {
$0let (foo, bar) = ("Foo", "Bar");
}
"#,
r#"
fn main() {
let foo = "Foo";
let bar = "Bar";
}
"#,
);
check_assist(
unwrap_tuple,
r#"
fn main() {
$0let (foo, bar, baz) = ("Foo", "Bar", "Baz");
}
"#,
r#"
fn main() {
let foo = "Foo";
let bar = "Bar";
let baz = "Baz";
}
"#,
);
}
#[test]
fn unwrap_tuple_with_types() {
check_assist(
unwrap_tuple,
r#"
fn main() {
$0let (foo, bar): (u8, i32) = (5, 10);
}
"#,
r#"
fn main() {
let foo: u8 = 5;
let bar: i32 = 10;
}
"#,
);
check_assist(
unwrap_tuple,
r#"
fn main() {
$0let (foo, bar, baz): (u8, i32, f64) = (5, 10, 17.5);
}
"#,
r#"
fn main() {
let foo: u8 = 5;
let bar: i32 = 10;
let baz: f64 = 17.5;
}
"#,
);
}
}
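
Note: the heart of the new assist is zipping the tuple pattern, the optional tuple type, and the tuple initializer element-by-element into one `let` per field (via `itertools::izip!`). A dependency-free sketch of the same string-building step, using the two-element case from the tests above:

fn main() {
    let pats = ["foo", "bar"];
    let exprs = ["\"Foo\"", "\"Bar\""];
    let mut decls = String::new();
    for (pat, expr) in pats.iter().zip(exprs.iter()) {
        decls.push_str(&format!("let {pat} = {expr};\n"));
    }
    assert_eq!(decls.trim(), "let foo = \"Foo\";\nlet bar = \"Bar\";");
}
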

View file

@ -189,6 +189,7 @@ mod handlers {
mod replace_turbofish_with_explicit_type;
mod split_import;
mod unmerge_match_arm;
mod unwrap_tuple;
mod sort_items;
mod toggle_ignore;
mod unmerge_use;
@ -291,6 +292,7 @@ mod handlers {
unnecessary_async::unnecessary_async,
unwrap_block::unwrap_block,
unwrap_result_return_type::unwrap_result_return_type,
unwrap_tuple::unwrap_tuple,
wrap_return_type_in_result::wrap_return_type_in_result,
// These are manually sorted for better priorities. By default,
// priority is determined by the size of the target range (smaller

View file

@ -96,8 +96,10 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) {
});
let actual = {
let source_change =
assist.source_change.expect("Assist did not contain any source changes");
let source_change = assist
.source_change
.filter(|it| !it.source_file_edits.is_empty() || !it.file_system_edits.is_empty())
.expect("Assist did not contain any source changes");
let mut actual = before;
if let Some(source_file_edit) = source_change.get_source_edit(file_id) {
source_file_edit.apply(&mut actual);
@ -140,8 +142,10 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_la
match (assist, expected) {
(Some(assist), ExpectedResult::After(after)) => {
let source_change =
assist.source_change.expect("Assist did not contain any source changes");
let source_change = assist
.source_change
.filter(|it| !it.source_file_edits.is_empty() || !it.file_system_edits.is_empty())
.expect("Assist did not contain any source changes");
let skip_header = source_change.source_file_edits.len() == 1
&& source_change.file_system_edits.len() == 0;
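
Note: the new guard in both test helpers uses `Option::filter`: a `Some(source_change)` whose edit lists are all empty is demoted to `None`, so the existing `expect` message still fires for an assist that reports a change without editing anything. Minimal sketch of the pattern:

fn main() {
    let edits: Option<Vec<&str>> = Some(vec![]);
    // `filter` keeps the value only if the predicate holds, otherwise yields None.
    let non_empty = edits.filter(|it| !it.is_empty());
    assert!(non_empty.is_none());
}
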

View file

@ -2386,6 +2386,25 @@ fn foo() -> i32 { 42i32 }
)
}
#[test]
fn doctest_unwrap_tuple() {
check_doc_test(
"unwrap_tuple",
r#####"
//- minicore: result
fn main() {
$0let (foo, bar) = ("Foo", "Bar");
}
"#####,
r#####"
fn main() {
let foo = "Foo";
let bar = "Bar";
}
"#####,
)
}
#[test]
fn doctest_wrap_return_type_in_result() {
check_doc_test(

View file

@ -38,7 +38,7 @@ use ide_db::{
};
use syntax::{
ast::{self, edit_in_place::AttrsOwnerEdit},
AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, T,
AstNode, SyntaxElement, SyntaxKind, TextRange, T,
};
use text_edit::TextEdit;
@ -85,20 +85,36 @@ fn complete_trait_impl_name(
name: &Option<ast::Name>,
kind: ImplCompletionKind,
) -> Option<()> {
let token = ctx.token.clone();
let item = match name {
Some(name) => name.syntax().parent(),
None => if token.kind() == SyntaxKind::WHITESPACE { token.prev_token()? } else { token }
.parent(),
None => {
let token = &ctx.token;
match token.kind() {
SyntaxKind::WHITESPACE => token.prev_token()?,
_ => token.clone(),
}
.parent()
}
}?;
complete_trait_impl(
acc,
ctx,
kind,
replacement_range(ctx, &item),
// item -> ASSOC_ITEM_LIST -> IMPL
&ast::Impl::cast(item.parent()?.parent()?)?,
);
let item = ctx.sema.original_syntax_node(&item)?;
// item -> ASSOC_ITEM_LIST -> IMPL
let impl_def = ast::Impl::cast(item.parent()?.parent()?)?;
let replacement_range = {
// ctx.sema.original_ast_node(item)?;
let first_child = item
.children_with_tokens()
.find(|child| {
!matches!(
child.kind(),
SyntaxKind::COMMENT | SyntaxKind::WHITESPACE | SyntaxKind::ATTR
)
})
.unwrap_or_else(|| SyntaxElement::Node(item.clone()));
TextRange::new(first_child.text_range().start(), ctx.source_range().end())
};
complete_trait_impl(acc, ctx, kind, replacement_range, &impl_def);
Some(())
}
@ -341,17 +357,6 @@ fn function_declaration(node: &ast::Fn, needs_whitespace: bool) -> String {
syntax.trim_end().to_owned()
}
fn replacement_range(ctx: &CompletionContext<'_>, item: &SyntaxNode) -> TextRange {
let first_child = item
.children_with_tokens()
.find(|child| {
!matches!(child.kind(), SyntaxKind::COMMENT | SyntaxKind::WHITESPACE | SyntaxKind::ATTR)
})
.unwrap_or_else(|| SyntaxElement::Node(item.clone()));
TextRange::new(first_child.text_range().start(), ctx.source_range().end())
}
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};

View file

@ -225,7 +225,7 @@ fn path_cmp_short(a: &ast::Path, b: &ast::Path) -> Ordering {
}
/// Compares two paths, if one ends earlier than the other the has_tl parameters decide which is
/// greater as a a path that has a tree list should be greater, while one that just ends without
/// greater as a path that has a tree list should be greater, while one that just ends without
/// a tree list should be considered less.
pub(super) fn use_tree_path_cmp(
a: &ast::Path,

View file

@ -239,6 +239,7 @@ impl Definition {
DefWithBody::Function(f) => f.source(db).map(|src| src.syntax().cloned()),
DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()),
DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()),
DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()),
};
return match def {
Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),

View file

@ -137,6 +137,37 @@ trait Bar {
#[cfg_attr(not(never), inline, cfg(no))] fn h() {}
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
"#,
);
}
#[test]
fn inactive_fields_and_variants() {
check(
r#"
enum Foo {
#[cfg(a)] Bar,
//^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
Baz {
#[cfg(a)] baz: String,
//^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
},
Qux(#[cfg(a)] String),
//^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
}
struct Baz {
#[cfg(a)] baz: String,
//^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
}
struct Qux(#[cfg(a)] String);
//^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
union FooBar {
#[cfg(a)] baz: u32,
//^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
}
"#,
);
}
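
Note: the fixtures above mirror ordinary conditional compilation, where a disabled `cfg` removes the field or variant entirely; the new diagnostics surface that for items nested inside ADTs. A compiling illustration using an always-false cfg in place of the disabled `a`:

enum Foo {
    #[cfg(any())] // always-false cfg: the variant is compiled out, like `#[cfg(a)]` with `a` disabled
    Bar,
    Baz,
}

fn main() {
    let _ = Foo::Baz;
}
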

View file

@ -8,13 +8,15 @@ use ide_db::{
use syntax::{ast::HasName, AstNode, TextRange};
use crate::{
fn_references::find_all_methods,
annotations::fn_references::find_all_methods,
goto_implementation::goto_implementation,
references::find_all_refs,
runnables::{runnables, Runnable},
NavigationTarget, RunnableKind,
};
mod fn_references;
// Feature: Annotations
//
// Provides user with annotations above items for looking up references or impl blocks
@ -30,8 +32,8 @@ pub struct Annotation {
#[derive(Debug)]
pub enum AnnotationKind {
Runnable(Runnable),
HasImpls { file_id: FileId, data: Option<Vec<NavigationTarget>> },
HasReferences { file_id: FileId, data: Option<Vec<FileRange>> },
HasImpls { pos: FilePosition, data: Option<Vec<NavigationTarget>> },
HasReferences { pos: FilePosition, data: Option<Vec<FileRange>> },
}
pub struct AnnotationConfig {
@ -68,13 +70,23 @@ pub(crate) fn annotations(
}
}
let mk_ranges = |(range, focus): (_, Option<_>)| {
let cmd_target: TextRange = focus.unwrap_or(range);
let annotation_range = match config.location {
AnnotationLocation::AboveName => cmd_target,
AnnotationLocation::AboveWholeItem => range,
};
let target_pos = FilePosition { file_id, offset: cmd_target.start() };
(annotation_range, target_pos)
};
visit_file_defs(&Semantics::new(db), file_id, &mut |def| {
let range = match def {
Definition::Const(konst) if config.annotate_references => {
konst.source(db).and_then(|node| name_range(db, config, node, file_id))
konst.source(db).and_then(|node| name_range(db, node, file_id))
}
Definition::Trait(trait_) if config.annotate_references || config.annotate_impls => {
trait_.source(db).and_then(|node| name_range(db, config, node, file_id))
trait_.source(db).and_then(|node| name_range(db, node, file_id))
}
Definition::Adt(adt) => match adt {
hir::Adt::Enum(enum_) => {
@ -83,27 +95,29 @@ pub(crate) fn annotations(
.variants(db)
.into_iter()
.map(|variant| {
variant
.source(db)
.and_then(|node| name_range(db, config, node, file_id))
variant.source(db).and_then(|node| name_range(db, node, file_id))
})
.flatten()
.for_each(|range| {
let (annotation_range, target_position) = mk_ranges(range);
annotations.push(Annotation {
range,
kind: AnnotationKind::HasReferences { file_id, data: None },
range: annotation_range,
kind: AnnotationKind::HasReferences {
pos: target_position,
data: None,
},
})
})
}
if config.annotate_references || config.annotate_impls {
enum_.source(db).and_then(|node| name_range(db, config, node, file_id))
enum_.source(db).and_then(|node| name_range(db, node, file_id))
} else {
None
}
}
_ => {
if config.annotate_references || config.annotate_impls {
adt.source(db).and_then(|node| name_range(db, config, node, file_id))
adt.source(db).and_then(|node| name_range(db, node, file_id))
} else {
None
}
@ -116,33 +130,32 @@ pub(crate) fn annotations(
Some(range) => range,
None => return,
};
let (annotation_range, target_pos) = mk_ranges(range);
if config.annotate_impls && !matches!(def, Definition::Const(_)) {
annotations
.push(Annotation { range, kind: AnnotationKind::HasImpls { file_id, data: None } });
annotations.push(Annotation {
range: annotation_range,
kind: AnnotationKind::HasImpls { pos: target_pos, data: None },
});
}
if config.annotate_references {
annotations.push(Annotation {
range,
kind: AnnotationKind::HasReferences { file_id, data: None },
range: annotation_range,
kind: AnnotationKind::HasReferences { pos: target_pos, data: None },
});
}
fn name_range<T: HasName>(
db: &RootDatabase,
config: &AnnotationConfig,
node: InFile<T>,
source_file_id: FileId,
) -> Option<TextRange> {
) -> Option<(TextRange, Option<TextRange>)> {
if let Some(InFile { file_id, value }) = node.original_ast_node(db) {
if file_id == source_file_id.into() {
return match config.location {
AnnotationLocation::AboveName => {
value.name().map(|name| name.syntax().text_range())
}
AnnotationLocation::AboveWholeItem => Some(value.syntax().text_range()),
};
return Some((
value.syntax().text_range(),
value.name().map(|name| name.syntax().text_range()),
));
}
}
None
@ -150,12 +163,13 @@ pub(crate) fn annotations(
});
if config.annotate_method_references {
annotations.extend(find_all_methods(db, file_id).into_iter().map(
|FileRange { file_id, range }| Annotation {
range,
kind: AnnotationKind::HasReferences { file_id, data: None },
},
));
annotations.extend(find_all_methods(db, file_id).into_iter().map(|range| {
let (annotation_range, target_range) = mk_ranges(range);
Annotation {
range: annotation_range,
kind: AnnotationKind::HasReferences { pos: target_range, data: None },
}
}));
}
annotations
@ -163,18 +177,11 @@ pub(crate) fn annotations(
pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) -> Annotation {
match annotation.kind {
AnnotationKind::HasImpls { file_id, ref mut data } => {
*data =
goto_implementation(db, FilePosition { file_id, offset: annotation.range.start() })
.map(|range| range.info);
AnnotationKind::HasImpls { pos, ref mut data } => {
*data = goto_implementation(db, pos).map(|range| range.info);
}
AnnotationKind::HasReferences { file_id, ref mut data } => {
*data = find_all_refs(
&Semantics::new(db),
FilePosition { file_id, offset: annotation.range.start() },
None,
)
.map(|result| {
AnnotationKind::HasReferences { pos, ref mut data } => {
*data = find_all_refs(&Semantics::new(db), pos, None).map(|result| {
result
.into_iter()
.flat_map(|res| res.references)
@ -268,9 +275,12 @@ fn main() {
Annotation {
range: 6..10,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 6,
},
data: Some(
[
FileRange {
@ -286,9 +296,12 @@ fn main() {
Annotation {
range: 30..36,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 30,
},
data: Some(
[],
),
@ -297,9 +310,12 @@ fn main() {
Annotation {
range: 53..57,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 53,
},
data: Some(
[],
),
@ -344,9 +360,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasImpls {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 7,
},
data: Some(
[],
),
@ -355,9 +374,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 7,
},
data: Some(
[
FileRange {
@ -373,9 +395,12 @@ fn main() {
Annotation {
range: 17..21,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 17,
},
data: Some(
[],
),
@ -424,9 +449,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasImpls {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 7,
},
data: Some(
[
NavigationTarget {
@ -445,9 +473,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 7,
},
data: Some(
[
FileRange {
@ -469,9 +500,12 @@ fn main() {
Annotation {
range: 20..31,
kind: HasImpls {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 20,
},
data: Some(
[
NavigationTarget {
@ -490,9 +524,12 @@ fn main() {
Annotation {
range: 20..31,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 20,
},
data: Some(
[
FileRange {
@ -508,9 +545,12 @@ fn main() {
Annotation {
range: 69..73,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 69,
},
data: Some(
[],
),
@ -551,9 +591,12 @@ fn main() {}
Annotation {
range: 3..7,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 3,
},
data: Some(
[],
),
@ -602,9 +645,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasImpls {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 7,
},
data: Some(
[
NavigationTarget {
@ -623,9 +669,12 @@ fn main() {
Annotation {
range: 7..11,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 7,
},
data: Some(
[
FileRange {
@ -647,9 +696,12 @@ fn main() {
Annotation {
range: 33..44,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 33,
},
data: Some(
[
FileRange {
@ -665,9 +717,12 @@ fn main() {
Annotation {
range: 61..65,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 61,
},
data: Some(
[],
),
@ -761,9 +816,12 @@ mod tests {
Annotation {
range: 3..7,
kind: HasReferences {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 3,
},
data: Some(
[],
),
@ -821,9 +879,12 @@ struct Foo;
Annotation {
range: 0..71,
kind: HasImpls {
file_id: FileId(
0,
),
pos: FilePosition {
file_id: FileId(
0,
),
offset: 67,
},
data: Some(
[],
),
@ -832,10 +893,15 @@ struct Foo;
Annotation {
range: 0..71,
kind: HasReferences {
file_id: FileId(
0,
pos: FilePosition {
file_id: FileId(
0,
),
offset: 67,
},
data: Some(
[],
),
data: None,
},
},
]
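
Note: the `mk_ranges` closure introduced earlier in this file is what produces the `pos: FilePosition { .. }` values in the updated expectations: the annotation range follows `config.location`, while the navigation target always points at the start of the focus (name) range when one exists. A simplified standalone model of that logic, using plain numeric ranges instead of `TextRange`/`FilePosition` (values chosen to match the shape of the last expectation above, whole-item range 0..71 with target offset 67):

#[derive(Clone, Copy, PartialEq, Debug)]
struct Range { start: u32, end: u32 }

enum Location { AboveName, AboveWholeItem }

fn mk_ranges(full: Range, focus: Option<Range>, location: Location) -> (Range, u32) {
    let target = focus.unwrap_or(full);
    let annotation_range = match location {
        Location::AboveName => target,
        Location::AboveWholeItem => full,
    };
    (annotation_range, target.start)
}

fn main() {
    let full = Range { start: 0, end: 71 };
    let name = Some(Range { start: 67, end: 71 });
    // Whole-item annotation keeps the full range but targets the name's start.
    assert_eq!(mk_ranges(full, name, Location::AboveWholeItem), (full, 67));
    // Above-name annotation collapses to the name range itself.
    assert_eq!(mk_ranges(full, name, Location::AboveName), (Range { start: 67, end: 71 }, 67));
}
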

View file

@ -4,30 +4,38 @@
use hir::Semantics;
use ide_assists::utils::test_related_attribute;
use ide_db::RootDatabase;
use syntax::{ast, ast::HasName, AstNode, SyntaxNode};
use syntax::{ast, ast::HasName, AstNode, SyntaxNode, TextRange};
use crate::{FileId, FileRange};
use crate::FileId;
pub(crate) fn find_all_methods(db: &RootDatabase, file_id: FileId) -> Vec<FileRange> {
pub(super) fn find_all_methods(
db: &RootDatabase,
file_id: FileId,
) -> Vec<(TextRange, Option<TextRange>)> {
let sema = Semantics::new(db);
let source_file = sema.parse(file_id);
source_file.syntax().descendants().filter_map(|it| method_range(it, file_id)).collect()
source_file.syntax().descendants().filter_map(|it| method_range(it)).collect()
}
fn method_range(item: SyntaxNode, file_id: FileId) -> Option<FileRange> {
fn method_range(item: SyntaxNode) -> Option<(TextRange, Option<TextRange>)> {
ast::Fn::cast(item).and_then(|fn_def| {
if test_related_attribute(&fn_def).is_some() {
None
} else {
fn_def.name().map(|name| FileRange { file_id, range: name.syntax().text_range() })
Some((
fn_def.syntax().text_range(),
fn_def.name().map(|name| name.syntax().text_range()),
))
}
})
}
#[cfg(test)]
mod tests {
use syntax::TextRange;
use crate::fixture;
use crate::{FileRange, TextSize};
use crate::TextSize;
use std::ops::RangeInclusive;
#[test]
@ -42,7 +50,7 @@ mod tests {
"#,
);
let refs = analysis.find_all_methods(pos.file_id).unwrap();
let refs = super::find_all_methods(&analysis.db, pos.file_id);
check_result(&refs, &[3..=13, 27..=33, 47..=57]);
}
@ -57,7 +65,7 @@ mod tests {
"#,
);
let refs = analysis.find_all_methods(pos.file_id).unwrap();
let refs = super::find_all_methods(&analysis.db, pos.file_id);
check_result(&refs, &[19..=22, 35..=38]);
}
@ -78,17 +86,18 @@ mod tests {
"#,
);
let refs = analysis.find_all_methods(pos.file_id).unwrap();
let refs = super::find_all_methods(&analysis.db, pos.file_id);
check_result(&refs, &[28..=34]);
}
fn check_result(refs: &[FileRange], expected: &[RangeInclusive<u32>]) {
fn check_result(refs: &[(TextRange, Option<TextRange>)], expected: &[RangeInclusive<u32>]) {
assert_eq!(refs.len(), expected.len());
for (i, item) in refs.iter().enumerate() {
for (i, &(full, focus)) in refs.iter().enumerate() {
let range = &expected[i];
assert_eq!(TextSize::from(*range.start()), item.range.start());
assert_eq!(TextSize::from(*range.end()), item.range.end());
let item = focus.unwrap_or(full);
assert_eq!(TextSize::from(*range.start()), item.start());
assert_eq!(TextSize::from(*range.end()), item.end());
}
}
}

View file

@ -232,8 +232,13 @@ pub(crate) fn token_as_doc_comment(doc_token: &SyntaxToken) -> Option<DocComment
(match_ast! {
match doc_token {
ast::Comment(comment) => TextSize::try_from(comment.prefix().len()).ok(),
ast::String(string) => doc_token.parent_ancestors().find_map(ast::Attr::cast)
.filter(|attr| attr.simple_name().as_deref() == Some("doc")).and_then(|_| string.open_quote_text_range().map(|it| it.len())),
ast::String(string) => {
doc_token.parent_ancestors().find_map(ast::Attr::cast).filter(|attr| attr.simple_name().as_deref() == Some("doc"))?;
if doc_token.parent_ancestors().find_map(ast::MacroCall::cast).filter(|mac| mac.path().and_then(|p| p.segment()?.name_ref()).as_ref().map(|n| n.text()).as_deref() == Some("include_str")).is_some() {
return None;
}
string.open_quote_text_range().map(|it| it.len())
},
_ => None,
}
}).map(|prefix_len| DocCommentToken { prefix_len, doc_token: doc_token.clone() })

View file

@ -95,6 +95,14 @@ fn try_lookup_include_path(
if !matches!(&*name.text(), "include" | "include_str" | "include_bytes") {
return None;
}
// Ignore non-built-in macros to account for shadowing
if let Some(it) = sema.resolve_macro_call(&macro_call) {
if !matches!(it.kind(sema.db), hir::MacroKind::BuiltIn) {
return None;
}
}
let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
let size = sema.db.file_text(file_id).len().try_into().ok()?;
Some(NavigationTarget {
@ -156,9 +164,6 @@ mod tests {
fn check(ra_fixture: &str) {
let (analysis, position, expected) = fixture::annotations(ra_fixture);
let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
if navs.is_empty() {
panic!("unresolved reference")
}
let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start());
let navs = navs
@ -1348,6 +1353,10 @@ fn f(e: Enum) {
check(
r#"
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include_str {}
fn main() {
let str = include_str!("foo.txt$0");
}
@ -1357,6 +1366,42 @@ fn main() {
"#,
);
}
#[test]
fn goto_doc_include_str() {
check(
r#"
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include_str {}
#[doc = include_str!("docs.md$0")]
struct Item;
//- /docs.md
// docs
//^file
"#,
);
}
#[test]
fn goto_shadow_include() {
check(
r#"
//- /main.rs
macro_rules! include {
("included.rs") => {}
}
include!("included.rs$0");
//- /included.rs
// empty
"#,
);
}
#[cfg(test)]
mod goto_impl_of_trait_fn {
use super::check;

View file

@ -1373,6 +1373,22 @@ fn main() {
().func$0();
//^^^^
}
"#,
);
}
#[test]
fn test_assoc_type_highlighting() {
check(
r#"
trait Trait {
type Output;
// ^^^^^^
}
impl Trait for () {
type Output$0 = ();
// ^^^^^^
}
"#,
);
}

View file

@ -346,7 +346,16 @@ pub(super) fn definition(
Definition::Module(it) => label_and_docs(db, it),
Definition::Function(it) => label_and_docs(db, it),
Definition::Adt(it) => label_and_docs(db, it),
Definition::Variant(it) => label_and_docs(db, it),
Definition::Variant(it) => label_value_and_docs(db, it, |&it| {
if !it.parent_enum(db).is_data_carrying(db) {
match it.eval(db) {
Ok(x) => Some(format!("{}", x)),
Err(_) => it.value(db).map(|x| format!("{:?}", x)),
}
} else {
None
}
}),
Definition::Const(it) => label_value_and_docs(db, it, |it| {
let body = it.eval(db);
match body {

View file

@ -698,6 +698,7 @@ fn hover_enum_variant() {
check(
r#"
enum Option<T> {
Some(T)
/// The None variant
Non$0e
}
@ -3527,6 +3528,112 @@ impl<const LEN: usize> Foo<LEN$0> {}
);
}
#[test]
fn hover_const_eval_variant() {
// show hex for <10
check(
r#"
#[repr(u8)]
enum E {
/// This is a doc
A$0 = 1 << 3,
}
"#,
expect![[r#"
*A*
```rust
test::E
```
```rust
A = 8
```
---
This is a doc
"#]],
);
// show hex for >10
check(
r#"
#[repr(u8)]
enum E {
/// This is a doc
A$0 = (1 << 3) + (1 << 2),
}
"#,
expect![[r#"
*A*
```rust
test::E
```
```rust
A = 12 (0xC)
```
---
This is a doc
"#]],
);
// enums in const eval
check(
r#"
#[repr(u8)]
enum E {
A = 1,
/// This is a doc
B$0 = E::A as u8 + 1,
}
"#,
expect![[r#"
*B*
```rust
test::E
```
```rust
B = 2
```
---
This is a doc
"#]],
);
// unspecified variant should increment by one
check(
r#"
#[repr(u8)]
enum E {
A = 4,
/// This is a doc
B$0,
}
"#,
expect![[r#"
*B*
```rust
test::E
```
```rust
B = 5
```
---
This is a doc
"#]],
);
}
#[test]
fn hover_const_eval() {
// show hex for <10
@ -3820,6 +3927,35 @@ fn foo() {
---
This is a doc
"#]],
);
check(
r#"
enum E {
/// This is a doc
A = 3,
}
fn foo(e: E) {
match e {
E::A$0 => (),
_ => ()
}
}
"#,
expect![[r#"
*A*
```rust
test::E
```
```rust
A = 3
```
---
This is a doc
"#]],
);
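
Note: the `A = 12 (0xC)` line in the expectations is just the evaluated discriminant rendered as decimal plus upper-case hex (the hex form only appears from 10 upward). A two-line check with std formatting:

fn main() {
    assert_eq!((1u8 << 3) + (1 << 2), 12);
    assert_eq!(format!("{} (0x{:X})", 12, 12), "12 (0xC)");
}
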

View file

@ -176,12 +176,6 @@ impl fmt::Debug for InlayHintLabelPart {
// * elided lifetimes
// * compiler inserted reborrows
//
// |===
// | Editor | Action Name
//
// | VS Code | **rust-analyzer: Toggle inlay hints*
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[]
pub(crate) fn inlay_hints(
db: &RootDatabase,
@ -2030,7 +2024,14 @@ impl<T> Vec<T> {
}
impl<T> IntoIterator for Vec<T> {
type Item=T;
type Item = T;
type IntoIter = IntoIter<T>;
}
struct IntoIter<T> {}
impl<T> Iterator for IntoIter<T> {
type Item = T;
}
fn main() {

View file

@ -31,7 +31,6 @@ mod highlight_related;
mod expand_macro;
mod extend_selection;
mod file_structure;
mod fn_references;
mod folding_ranges;
mod goto_declaration;
mod goto_definition;
@ -236,7 +235,7 @@ impl Analysis {
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo { repo: None, name: None },
);
change.change_file(file_id, Some(Arc::new(text)));
change.set_crate_graph(crate_graph);
@ -429,11 +428,6 @@ impl Analysis {
self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope))
}
/// Finds all methods and free functions for the file. Does not return tests!
pub fn find_all_methods(&self, file_id: FileId) -> Cancellable<Vec<FileRange>> {
self.with_db(|db| fn_references::find_all_methods(db, file_id))
}
/// Returns a short text describing element at position.
pub fn hover(
&self,

View file

@ -253,10 +253,14 @@ pub(crate) fn def_to_moniker(
},
kind: if krate == from_crate { MonikerKind::Export } else { MonikerKind::Import },
package_information: {
let name = krate.display_name(db)?.to_string();
let (repo, version) = match krate.origin(db) {
CrateOrigin::CratesIo { repo } => (repo?, krate.version(db)?),
let (name, repo, version) = match krate.origin(db) {
CrateOrigin::CratesIo { repo, name } => (
name.unwrap_or(krate.display_name(db)?.canonical_name().to_string()),
repo?,
krate.version(db)?,
),
CrateOrigin::Lang(lang) => (
krate.display_name(db)?.canonical_name().to_string(),
"https://github.com/rust-lang/rust/".to_string(),
match lang {
LangCrateOrigin::Other => {

View file

@ -87,9 +87,9 @@ fn punctuation(
let parent = token.parent();
let parent_kind = parent.as_ref().map_or(EOF, SyntaxNode::kind);
match (kind, parent_kind) {
(T![?], _) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow,
(T![?], TRY_EXPR) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow,
(T![&], BIN_EXPR) => HlOperator::Bitwise.into(),
(T![&], _) => {
(T![&], REF_EXPR) => {
let h = HlTag::Operator(HlOperator::Other).into();
let is_unsafe = parent
.and_then(ast::RefExpr::cast)
@ -100,7 +100,9 @@ fn punctuation(
h
}
}
(T![::] | T![->] | T![=>] | T![..] | T![=] | T![@] | T![.], _) => HlOperator::Other.into(),
(T![::] | T![->] | T![=>] | T![..] | T![..=] | T![=] | T![@] | T![.], _) => {
HlOperator::Other.into()
}
(T![!], MACRO_CALL | MACRO_RULES) => HlPunct::MacroBang.into(),
(T![!], NEVER_TYPE) => HlTag::BuiltinType.into(),
(T![!], PREFIX_EXPR) => HlOperator::Logical.into(),
@ -129,7 +131,7 @@ fn punctuation(
(T![+=] | T![-=] | T![*=] | T![/=] | T![%=], BIN_EXPR) => {
Highlight::from(HlOperator::Arithmetic) | HlMod::Mutable
}
(T![|] | T![&] | T![!] | T![^] | T![>>] | T![<<], BIN_EXPR) => HlOperator::Bitwise.into(),
(T![|] | T![&] | T![^] | T![>>] | T![<<], BIN_EXPR) => HlOperator::Bitwise.into(),
(T![|=] | T![&=] | T![^=] | T![>>=] | T![<<=], BIN_EXPR) => {
Highlight::from(HlOperator::Bitwise) | HlMod::Mutable
}
@ -137,7 +139,6 @@ fn punctuation(
(T![>] | T![<] | T![==] | T![>=] | T![<=] | T![!=], BIN_EXPR) => {
HlOperator::Comparison.into()
}
(_, PREFIX_EXPR | BIN_EXPR | RANGE_EXPR | RANGE_PAT | REST_PAT) => HlOperator::Other.into(),
(_, ATTR) => HlTag::AttributeBracket.into(),
(kind, _) => match kind {
T!['['] | T![']'] => HlPunct::Bracket,

View file

@ -48,15 +48,15 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">impl</span> <span class="struct">foo</span> <span class="brace">{</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public static">is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">is_not_static</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span>
<span class="keyword">trait</span> <span class="trait declaration">t</span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="function associated declaration static trait">t_is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">fn</span> <span class="function associated declaration reference trait">t_is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">fn</span> <span class="function associated declaration reference trait">t_is_not_static</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span>
<span class="keyword">impl</span> <span class="trait">t</span> <span class="keyword">for</span> <span class="struct">foo</span> <span class="brace">{</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public static trait">is_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference trait">is_not_static</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference trait">is_not_static</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span></code></pre>

View file

@ -125,7 +125,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment documentation">/// ```sh</span>
<span class="comment documentation">/// echo 1</span>
<span class="comment documentation">/// ```</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">foo</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">bool</span> <span class="brace">{</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">foo</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">bool</span> <span class="brace">{</span>
<span class="bool_literal">true</span>
<span class="brace">}</span>
<span class="brace">}</span>

View file

@ -61,11 +61,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="keyword">trait</span> <span class="trait declaration">Bar</span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span><span class="semicolon">;</span>
<span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">impl</span> <span class="trait">Bar</span> <span class="keyword">for</span> <span class="struct">Foo</span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="function associated declaration reference trait">bar</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
<span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
<span class="brace">}</span>
<span class="brace">}</span>
@ -75,11 +75,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="value_param">f</span><span class="operator">.</span><span class="function associated consuming">baz</span><span class="parenthesis">(</span><span class="self_keyword consuming mutable">self</span><span class="parenthesis">)</span>
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="operator">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="self_keyword mutable reference">self</span><span class="operator">.</span><span class="field">x</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
<span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
<span class="brace">}</span>
<span class="brace">}</span>
@ -96,11 +96,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="value_param">f</span><span class="operator">.</span><span class="function associated">baz</span><span class="parenthesis">(</span><span class="self_keyword">self</span><span class="parenthesis">)</span>
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="operator">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="function associated declaration mutable reference">qux</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="keyword">mut</span> <span class="self_keyword declaration mutable reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="self_keyword mutable reference">self</span><span class="operator">.</span><span class="field">x</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">u32</span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="function associated declaration reference">quop</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">u32</span> <span class="brace">{</span>
<span class="self_keyword reference">self</span><span class="operator">.</span><span class="field">x</span>
<span class="brace">}</span>
<span class="brace">}</span>

View file

@ -42,7 +42,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">fixture</span><span class="parenthesis">(</span><span class="value_param declaration reference">ra_fixture</span><span class="colon">:</span> <span class="operator">&</span><span class="builtin_type">str</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<pre><code><span class="keyword">fn</span> <span class="function declaration">fixture</span><span class="parenthesis">(</span><span class="value_param declaration reference">ra_fixture</span><span class="colon">:</span> <span class="punctuation">&</span><span class="builtin_type">str</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="function">fixture</span><span class="parenthesis">(</span><span class="string_literal">r#"</span>

View file

@ -45,8 +45,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<pre><code>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="keyword">struct</span> <span class="struct declaration">Foo</span><span class="angle">&lt;</span><span class="lifetime declaration">'a</span><span class="comma">,</span> <span class="lifetime declaration">'b</span><span class="comma">,</span> <span class="lifetime declaration">'c</span><span class="angle">&gt;</span> <span class="keyword">where</span> <span class="lifetime">'a</span><span class="colon">:</span> <span class="lifetime">'a</span><span class="comma">,</span> <span class="lifetime">'static</span><span class="colon">:</span> <span class="lifetime">'static</span> <span class="brace">{</span>
<span class="field declaration">field</span><span class="colon">:</span> <span class="operator">&</span><span class="lifetime">'a</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
<span class="field declaration">field2</span><span class="colon">:</span> <span class="operator">&</span><span class="lifetime">'static</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
<span class="field declaration">field</span><span class="colon">:</span> <span class="punctuation">&</span><span class="lifetime">'a</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
<span class="field declaration">field2</span><span class="colon">:</span> <span class="punctuation">&</span><span class="lifetime">'static</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
<span class="brace">}</span>
<span class="keyword">impl</span><span class="angle">&lt;</span><span class="lifetime declaration">'a</span><span class="angle">&gt;</span> <span class="struct">Foo</span><span class="angle">&lt;</span><span class="lifetime">'_</span><span class="comma">,</span> <span class="lifetime">'a</span><span class="comma">,</span> <span class="lifetime">'static</span><span class="angle">&gt;</span>
<span class="keyword">where</span>

View file

@ -62,16 +62,16 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="string_literal">"explicit panic"</span><span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>literal <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>literal <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="comment">// Use `panic_str` instead of `panic_display::&lt;&str&gt;` for non_fmt_panic lint.</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_str<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="comment">// Special-case the single-argument case for const_panic.</span>
<span class="parenthesis">(</span><span class="string_literal">"{}"</span><span class="comma">,</span> <span class="punctuation">$</span>arg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="operator control">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_display<span class="parenthesis">(</span><span class="operator">&</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span>
<span class="parenthesis">(</span><span class="string_literal">"{}"</span><span class="comma">,</span> <span class="punctuation">$</span>arg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_display<span class="parenthesis">(</span><span class="punctuation">&</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="colon">:</span>expr<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_fmt<span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>const_format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span>

View file

@ -49,7 +49,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">unsafe_deref</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
<span class="punctuation">*</span><span class="parenthesis">(</span><span class="operator">&</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="keyword">as</span> <span class="punctuation">*</span><span class="keyword">const</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
<span class="punctuation">*</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="keyword">as</span> <span class="punctuation">*</span><span class="keyword">const</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
<span class="brace">}</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">static</span> <span class="keyword">mut</span> <span class="static declaration mutable unsafe">MUT_GLOBAL</span><span class="colon">:</span> <span class="struct">Struct</span> <span class="operator">=</span> <span class="struct">Struct</span> <span class="brace">{</span> <span class="field">field</span><span class="colon">:</span> <span class="numeric_literal">0</span> <span class="brace">}</span><span class="semicolon">;</span>
@ -63,7 +63,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">struct</span> <span class="struct declaration">Struct</span> <span class="brace">{</span> <span class="field declaration">field</span><span class="colon">:</span> <span class="builtin_type">i32</span> <span class="brace">}</span>
<span class="keyword">impl</span> <span class="struct">Struct</span> <span class="brace">{</span>
<span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function associated declaration reference unsafe">unsafe_method</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword unsafe">unsafe</span> <span class="keyword">fn</span> <span class="function associated declaration reference unsafe">unsafe_method</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">repr</span><span class="parenthesis attribute">(</span><span class="none attribute">packed</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
@ -78,11 +78,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">fn</span> <span class="function declaration">unsafe_trait_bound</span><span class="angle">&lt;</span><span class="type_param declaration">T</span><span class="colon">:</span> <span class="trait">UnsafeTrait</span><span class="angle">&gt;</span><span class="parenthesis">(</span><span class="punctuation">_</span><span class="colon">:</span> <span class="type_param">T</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">trait</span> <span class="trait declaration">DoTheAutoref</span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">impl</span> <span class="trait">DoTheAutoref</span> <span class="keyword">for</span> <span class="builtin_type">u16</span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">fn</span> <span class="function associated declaration reference trait">calls_autoref</span><span class="parenthesis">(</span><span class="punctuation">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>

View file

@ -11,6 +11,8 @@ use proc_macro_api::ProcMacroKind;
use super::PanicMessage;
pub use ra_server::TokenStream;
pub(crate) struct Abi {
exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
}

View file

@ -32,8 +32,8 @@ mod abi_sysroot;
include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
// Used by `test/utils.rs`
#[cfg(test)]
pub(crate) use abi_1_63::TokenStream as TestTokenStream;
#[cfg(all(test, feature = "sysroot-abi"))]
pub(crate) use abi_sysroot::TokenStream as TestTokenStream;
use super::dylib::LoadProcMacroDylibError;
pub(crate) use abi_1_58::Abi as Abi_1_58;
@ -144,3 +144,10 @@ impl Abi {
}
}
}
#[test]
fn test_version_check() {
let path = paths::AbsPathBuf::assert(crate::proc_macro_test_dylib_path());
let info = proc_macro_api::read_dylib_info(&path).unwrap();
assert!(info.version.1 >= 50);
}
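
The hunks above gate the sysroot ABI tests on both cfg(test) and the `sysroot-abi` cargo feature. A minimal sketch of that gating pattern, reusing only the feature name from the diff (module name and test body are illustrative):

// Compiled only when running tests *and* the `sysroot-abi` feature is enabled,
// mirroring the `#[cfg(all(test, feature = "sysroot-abi"))]` gate above.
#[cfg(all(test, feature = "sysroot-abi"))]
mod sysroot_abi_tests {
    #[test]
    fn runs_only_with_feature() {
        assert_eq!(2 + 2, 4);
    }
}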

View file

@ -20,6 +20,8 @@
mod dylib;
mod abis;
pub mod cli;
use std::{
collections::{hash_map::Entry, HashMap},
env,
@ -149,7 +151,10 @@ impl EnvSnapshot {
}
}
pub mod cli;
#[cfg(all(feature = "sysroot-abi", test))]
mod tests;
#[cfg(test)]
mod tests;
pub fn proc_macro_test_dylib_path() -> std::path::PathBuf {
proc_macro_test::PROC_MACRO_TEST_LOCATION.into()
}

View file

@ -2,10 +2,10 @@
#[macro_use]
mod utils;
use expect_test::expect;
use paths::AbsPathBuf;
use utils::*;
use expect_test::expect;
#[test]
fn test_derive_empty() {
assert_expand("DeriveEmpty", r#"struct S;"#, expect![[r#"SUBTREE $"#]]);
@ -157,10 +157,3 @@ fn list_test_macros() {
DeriveError [CustomDerive]"#]]
.assert_eq(&res);
}
#[test]
fn test_version_check() {
let path = AbsPathBuf::assert(fixtures::proc_macro_test_dylib_path());
let info = proc_macro_api::read_dylib_info(&path).unwrap();
assert!(info.version.1 >= 50);
}

View file

@ -1,15 +1,9 @@
//! utils used in proc-macro tests
use crate::dylib;
use crate::ProcMacroSrv;
use expect_test::Expect;
use std::str::FromStr;
pub mod fixtures {
pub fn proc_macro_test_dylib_path() -> std::path::PathBuf {
proc_macro_test::PROC_MACRO_TEST_LOCATION.into()
}
}
use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};
fn parse_string(code: &str) -> Option<crate::abis::TestTokenStream> {
// This is a bit strange. We need to parse a string into a token stream into
@ -30,7 +24,7 @@ pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, e
}
fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
let path = fixtures::proc_macro_test_dylib_path();
let path = proc_macro_test_dylib_path();
let expander = dylib::Expander::new(&path).unwrap();
let fixture = parse_string(input).unwrap();
let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
@ -40,7 +34,7 @@ fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect:
}
pub(crate) fn list() -> Vec<String> {
let dylib_path = fixtures::proc_macro_test_dylib_path();
let dylib_path = proc_macro_test_dylib_path();
let mut srv = ProcMacroSrv::default();
let res = srv.list_macros(&dylib_path).unwrap();
res.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()

View file

@ -15,7 +15,7 @@ use rustc_hash::FxHashMap;
use semver::Version;
use serde::Deserialize;
use crate::{cfg_flag::CfgFlag, CargoConfig, CargoWorkspace, Package};
use crate::{cfg_flag::CfgFlag, CargoConfig, CargoFeatures, CargoWorkspace, Package};
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct WorkspaceBuildScripts {
@ -49,7 +49,6 @@ impl WorkspaceBuildScripts {
let mut cmd = Command::new(toolchain::cargo());
cmd.envs(&config.extra_env);
cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]);
// --all-targets includes tests, benches and examples in addition to the
@ -61,15 +60,18 @@ impl WorkspaceBuildScripts {
cmd.args(&["--target", target]);
}
if config.all_features {
cmd.arg("--all-features");
} else {
if config.no_default_features {
cmd.arg("--no-default-features");
match &config.features {
CargoFeatures::All => {
cmd.arg("--all-features");
}
if !config.features.is_empty() {
cmd.arg("--features");
cmd.arg(config.features.join(" "));
CargoFeatures::Selected { features, no_default_features } => {
if *no_default_features {
cmd.arg("--no-default-features");
}
if !features.is_empty() {
cmd.arg("--features");
cmd.arg(features.join(" "));
}
}
}
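
The hunk above replaces the old all_features/no_default_features booleans with a single CargoFeatures value (the enum itself is introduced later in this diff, in cargo_workspace.rs). A self-contained sketch of the same flag-selection logic, with the enum re-declared locally for illustration:

// Illustrative re-declaration of the enum introduced by this commit.
enum CargoFeatures {
    All,
    Selected { features: Vec<String>, no_default_features: bool },
}

// Turn the feature selection into `cargo check` arguments, as the hunk above does.
fn feature_args(config: &CargoFeatures) -> Vec<String> {
    let mut args = Vec::new();
    match config {
        CargoFeatures::All => args.push("--all-features".to_owned()),
        CargoFeatures::Selected { features, no_default_features } => {
            if *no_default_features {
                args.push("--no-default-features".to_owned());
            }
            if !features.is_empty() {
                args.push("--features".to_owned());
                // cargo accepts a single space-separated list.
                args.push(features.join(" "));
            }
        }
    }
    args
}

fn main() {
    let selected = CargoFeatures::Selected {
        features: vec!["serde".to_owned()],
        no_default_features: true,
    };
    assert_eq!(feature_args(&selected), ["--no-default-features", "--features", "serde"]);
}

Per the Default impl shown below in cargo_workspace.rs, the default selection is Selected with an empty feature list and default features enabled.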

View file

@ -71,35 +71,40 @@ impl Default for UnsetTestCrates {
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum CargoFeatures {
All,
Selected {
/// List of features to activate.
features: Vec<String>,
/// Do not activate the `default` feature.
no_default_features: bool,
},
}
impl Default for CargoFeatures {
fn default() -> Self {
CargoFeatures::Selected { features: vec![], no_default_features: false }
}
}
#[derive(Default, Clone, Debug, PartialEq, Eq)]
pub struct CargoConfig {
/// Do not activate the `default` feature.
pub no_default_features: bool,
/// Activate all available features
pub all_features: bool,
/// List of features to activate.
/// This will be ignored if `cargo_all_features` is true.
pub features: Vec<String>,
pub features: CargoFeatures,
/// rustc target
pub target: Option<String>,
/// Don't load sysroot crates (`std`, `core` & friends). Might be useful
/// when debugging isolated issues.
pub no_sysroot: bool,
/// Sysroot loading behavior
pub sysroot: Option<RustcSource>,
/// rustc private crate source
pub rustc_source: Option<RustcSource>,
/// crates to disable `#[cfg(test)]` on
pub unset_test_crates: UnsetTestCrates,
/// Invoke `cargo check` through the RUSTC_WRAPPER.
pub wrap_rustc_in_build_scripts: bool,
/// The command to run instead of `cargo check` for building build scripts.
pub run_build_script_command: Option<Vec<String>>,
/// Extra env vars to set when invoking the cargo command
pub extra_env: FxHashMap<String, String>,
}
@ -143,7 +148,7 @@ pub struct PackageData {
pub targets: Vec<Target>,
/// Does this package come from the local filesystem (and is editable)?
pub is_local: bool,
// Whether this package is a member of the workspace
/// Whether this package is a member of the workspace
pub is_member: bool,
/// List of packages this package depends on
pub dependencies: Vec<PackageDependency>,
@ -249,8 +254,8 @@ impl TargetKind {
}
}
// Deserialize helper for the cargo metadata
#[derive(Deserialize, Default)]
// Deserialise helper for the cargo metadata
struct PackageMetadata {
#[serde(rename = "rust-analyzer")]
rust_analyzer: Option<RustAnalyzerPackageMetaData>,
@ -266,22 +271,25 @@ impl CargoWorkspace {
let target = config
.target
.clone()
.or_else(|| cargo_config_build_target(cargo_toml, config))
.or_else(|| rustc_discover_host_triple(cargo_toml, config));
.or_else(|| cargo_config_build_target(cargo_toml, &config.extra_env))
.or_else(|| rustc_discover_host_triple(cargo_toml, &config.extra_env));
let mut meta = MetadataCommand::new();
meta.cargo_path(toolchain::cargo());
meta.manifest_path(cargo_toml.to_path_buf());
if config.all_features {
meta.features(CargoOpt::AllFeatures);
} else {
if config.no_default_features {
// FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures`
// https://github.com/oli-obk/cargo_metadata/issues/79
meta.features(CargoOpt::NoDefaultFeatures);
match &config.features {
CargoFeatures::All => {
meta.features(CargoOpt::AllFeatures);
}
if !config.features.is_empty() {
meta.features(CargoOpt::SomeFeatures(config.features.clone()));
CargoFeatures::Selected { features, no_default_features } => {
if *no_default_features {
// FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures`
// https://github.com/oli-obk/cargo_metadata/issues/79
meta.features(CargoOpt::NoDefaultFeatures);
}
if !features.is_empty() {
meta.features(CargoOpt::SomeFeatures(features.clone()));
}
}
}
meta.current_dir(current_dir.as_os_str());
@ -295,12 +303,9 @@ impl CargoWorkspace {
// unclear whether cargo itself supports it.
progress("metadata".to_string());
fn exec_with_env(
command: &cargo_metadata::MetadataCommand,
extra_env: &FxHashMap<String, String>,
) -> Result<cargo_metadata::Metadata, cargo_metadata::Error> {
let mut command = command.cargo_command();
command.envs(extra_env);
(|| -> Result<cargo_metadata::Metadata, cargo_metadata::Error> {
let mut command = meta.cargo_command();
command.envs(&config.extra_env);
let output = command.output()?;
if !output.status.success() {
return Err(cargo_metadata::Error::CargoMetadata {
@ -312,12 +317,8 @@ impl CargoWorkspace {
.find(|line| line.starts_with('{'))
.ok_or(cargo_metadata::Error::NoJson)?;
cargo_metadata::MetadataCommand::parse(stdout)
}
let meta = exec_with_env(&meta, &config.extra_env)
.with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))?;
Ok(meta)
})()
.with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))
}
pub fn new(mut meta: cargo_metadata::Metadata) -> CargoWorkspace {
@ -386,32 +387,14 @@ impl CargoWorkspace {
}
let resolve = meta.resolve.expect("metadata executed with deps");
for mut node in resolve.nodes {
let source = match pkg_by_id.get(&node.id) {
Some(&src) => src,
// FIXME: replace this and a similar branch below with `.unwrap`, once
// https://github.com/rust-lang/cargo/issues/7841
// is fixed and hits stable (around 1.43-is probably?).
None => {
tracing::error!("Node id do not match in cargo metadata, ignoring {}", node.id);
continue;
}
};
let &source = pkg_by_id.get(&node.id).unwrap();
node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg));
for (dep_node, kind) in node
let dependencies = node
.deps
.iter()
.flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind)))
{
let pkg = match pkg_by_id.get(&dep_node.pkg) {
Some(&pkg) => pkg,
None => {
tracing::error!(
"Dep node id do not match in cargo metadata, ignoring {}",
dep_node.pkg
);
continue;
}
};
.flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind)));
for (dep_node, kind) in dependencies {
let &pkg = pkg_by_id.get(&dep_node.pkg).unwrap();
let dep = PackageDependency { name: dep_node.name.clone(), pkg, kind };
packages[source].dependencies.push(dep);
}
@ -456,10 +439,7 @@ impl CargoWorkspace {
found = true
}
self[pkg].dependencies.iter().find_map(|dep| {
if &self[dep.pkg].manifest == manifest_path {
return Some(self[pkg].manifest.clone());
}
None
(&self[dep.pkg].manifest == manifest_path).then(|| self[pkg].manifest.clone())
})
})
.collect::<Vec<ManifestPath>>();
@ -485,9 +465,12 @@ impl CargoWorkspace {
}
}
fn rustc_discover_host_triple(cargo_toml: &ManifestPath, config: &CargoConfig) -> Option<String> {
fn rustc_discover_host_triple(
cargo_toml: &ManifestPath,
extra_env: &FxHashMap<String, String>,
) -> Option<String> {
let mut rustc = Command::new(toolchain::rustc());
rustc.envs(&config.extra_env);
rustc.envs(extra_env);
rustc.current_dir(cargo_toml.parent()).arg("-vV");
tracing::debug!("Discovering host platform by {:?}", rustc);
match utf8_stdout(rustc) {
@ -509,9 +492,12 @@ fn rustc_discover_host_triple(cargo_toml: &ManifestPath, config: &CargoConfig) -
}
}
fn cargo_config_build_target(cargo_toml: &ManifestPath, config: &CargoConfig) -> Option<String> {
fn cargo_config_build_target(
cargo_toml: &ManifestPath,
extra_env: &FxHashMap<String, String>,
) -> Option<String> {
let mut cargo_config = Command::new(toolchain::cargo());
cargo_config.envs(&config.extra_env);
cargo_config.envs(extra_env);
cargo_config
.current_dir(cargo_toml.parent())
.args(&["-Z", "unstable-options", "config", "get", "build.target"])

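The metadata hunk above also replaces the local exec_with_env helper with an immediately-invoked closure chained straight into with_context, so the fallible steps and their error annotation read top to bottom. A minimal sketch of that pattern using only the anyhow crate; the command and error message here are placeholders, not the real cargo metadata invocation:

use std::process::Command;

use anyhow::{Context, Result};

fn run_and_capture(program: &str) -> Result<String> {
    // Group several fallible steps in an immediately-invoked closure so a
    // single `with_context` call annotates whichever step fails.
    (|| -> Result<String> {
        let output = Command::new(program).arg("--version").output()?;
        if !output.status.success() {
            anyhow::bail!("{} exited with {}", program, output.status);
        }
        Ok(String::from_utf8(output.stdout)?)
    })()
    .with_context(|| format!("Failed to run `{program}`"))
}

fn main() -> Result<()> {
    println!("{}", run_and_capture("rustc")?.trim());
    Ok(())
}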
View file

@ -42,8 +42,8 @@ use rustc_hash::FxHashSet;
pub use crate::{
build_scripts::WorkspaceBuildScripts,
cargo_workspace::{
CargoConfig, CargoWorkspace, Package, PackageData, PackageDependency, RustcSource, Target,
TargetData, TargetKind, UnsetTestCrates,
CargoConfig, CargoFeatures, CargoWorkspace, Package, PackageData, PackageDependency,
RustcSource, Target, TargetData, TargetKind, UnsetTestCrates,
},
manifest_path::ManifestPath,
project_json::{ProjectJson, ProjectJsonData},

View file

@ -110,14 +110,17 @@ impl ProjectJson {
.collect::<Vec<_>>(),
}
}
/// Returns the number of crates in the project.
pub fn n_crates(&self) -> usize {
self.crates.len()
}
/// Returns an iterator over the crates in the project.
pub fn crates(&self) -> impl Iterator<Item = (CrateId, &Crate)> + '_ {
self.crates.iter().enumerate().map(|(idx, krate)| (CrateId(idx as u32), krate))
}
/// Returns the path to the project's root folder.
pub fn path(&self) -> &AbsPath {
&self.project_root

View file

@ -3,13 +3,14 @@
use std::process::Command;
use anyhow::Result;
use rustc_hash::FxHashMap;
use crate::{cfg_flag::CfgFlag, utf8_stdout, CargoConfig, ManifestPath};
use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath};
pub(crate) fn get(
cargo_toml: Option<&ManifestPath>,
target: Option<&str>,
config: &CargoConfig,
extra_env: &FxHashMap<String, String>,
) -> Vec<CfgFlag> {
let _p = profile::span("rustc_cfg::get");
let mut res = Vec::with_capacity(6 * 2 + 1);
@ -22,7 +23,7 @@ pub(crate) fn get(
}
}
match get_rust_cfgs(cargo_toml, target, config) {
match get_rust_cfgs(cargo_toml, target, extra_env) {
Ok(rustc_cfgs) => {
tracing::debug!(
"rustc cfgs found: {:?}",
@ -42,11 +43,11 @@ pub(crate) fn get(
fn get_rust_cfgs(
cargo_toml: Option<&ManifestPath>,
target: Option<&str>,
config: &CargoConfig,
extra_env: &FxHashMap<String, String>,
) -> Result<String> {
if let Some(cargo_toml) = cargo_toml {
let mut cargo_config = Command::new(toolchain::cargo());
cargo_config.envs(&config.extra_env);
cargo_config.envs(extra_env);
cargo_config
.current_dir(cargo_toml.parent())
.args(&["-Z", "unstable-options", "rustc", "--print", "cfg"])
@ -61,7 +62,7 @@ fn get_rust_cfgs(
}
// using unstable cargo features failed, fall back to using plain rustc
let mut cmd = Command::new(toolchain::rustc());
cmd.envs(&config.extra_env);
cmd.envs(extra_env);
cmd.args(&["--print", "cfg", "-O"]);
if let Some(target) = target {
cmd.args(&["--target", target]);

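The change above threads an extra_env map through instead of the whole CargoConfig; each spawned rustc/cargo process simply gets cmd.envs(extra_env). A small sketch of that env threading with plain std types (a HashMap stands in for the FxHashMap used in the diff, and the environment variable is an arbitrary example):

use std::collections::HashMap;
use std::process::Command;

// Run `rustc --print cfg`, forwarding caller-supplied extra environment variables.
fn print_cfg(extra_env: &HashMap<String, String>) -> std::io::Result<String> {
    let mut cmd = Command::new("rustc");
    cmd.envs(extra_env); // &HashMap iterates as (&String, &String), which `envs` accepts
    cmd.args(["--print", "cfg"]);
    let output = cmd.output()?;
    Ok(String::from_utf8_lossy(&output.stdout).into_owned())
}

fn main() -> std::io::Result<()> {
    let mut extra_env = HashMap::new();
    extra_env.insert("CARGO_TERM_COLOR".to_owned(), "never".to_owned());
    println!("{}", print_cfg(&extra_env)?);
    Ok(())
}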
View file

@ -9,8 +9,9 @@ use std::{env, fs, iter, ops, path::PathBuf, process::Command};
use anyhow::{format_err, Result};
use la_arena::{Arena, Idx};
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
use crate::{utf8_stdout, CargoConfig, ManifestPath};
use crate::{utf8_stdout, ManifestPath};
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Sysroot {
@ -66,23 +67,37 @@ impl Sysroot {
pub fn crates<'a>(&'a self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + 'a {
self.crates.iter().map(|(id, _data)| id)
}
}
pub fn discover(dir: &AbsPath, config: &CargoConfig) -> Result<Sysroot> {
impl Sysroot {
pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Result<Sysroot> {
tracing::debug!("Discovering sysroot for {}", dir.display());
let sysroot_dir = discover_sysroot_dir(dir, config)?;
let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir, dir, config)?;
let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
let sysroot_src_dir =
discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?;
let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?;
Ok(res)
}
pub fn discover_rustc(cargo_toml: &ManifestPath, config: &CargoConfig) -> Option<ManifestPath> {
pub fn discover_rustc(
cargo_toml: &ManifestPath,
extra_env: &FxHashMap<String, String>,
) -> Option<ManifestPath> {
tracing::debug!("Discovering rustc source for {}", cargo_toml.display());
let current_dir = cargo_toml.parent();
discover_sysroot_dir(current_dir, config)
discover_sysroot_dir(current_dir, extra_env)
.ok()
.and_then(|sysroot_dir| get_rustc_src(&sysroot_dir))
}
pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf) -> Result<Sysroot> {
let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| {
format_err!("can't load standard library from sysroot {}", sysroot_dir.display())
})?;
let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?;
Ok(res)
}
pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Result<Sysroot> {
let mut sysroot =
Sysroot { root: sysroot_dir, src_root: sysroot_src_dir, crates: Arena::default() };
@ -146,35 +161,43 @@ impl Sysroot {
}
}
fn discover_sysroot_dir(current_dir: &AbsPath, config: &CargoConfig) -> Result<AbsPathBuf> {
fn discover_sysroot_dir(
current_dir: &AbsPath,
extra_env: &FxHashMap<String, String>,
) -> Result<AbsPathBuf> {
let mut rustc = Command::new(toolchain::rustc());
rustc.envs(&config.extra_env);
rustc.envs(extra_env);
rustc.current_dir(current_dir).args(&["--print", "sysroot"]);
tracing::debug!("Discovering sysroot by {:?}", rustc);
let stdout = utf8_stdout(rustc)?;
Ok(AbsPathBuf::assert(PathBuf::from(stdout)))
}
fn discover_sysroot_src_dir(
sysroot_path: &AbsPathBuf,
current_dir: &AbsPath,
config: &CargoConfig,
) -> Result<AbsPathBuf> {
fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
if let Ok(path) = env::var("RUST_SRC_PATH") {
let path = AbsPathBuf::try_from(path.as_str())
.map_err(|path| format_err!("RUST_SRC_PATH must be absolute: {}", path.display()))?;
let core = path.join("core");
if fs::metadata(&core).is_ok() {
tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display());
return Ok(path);
if let Ok(path) = AbsPathBuf::try_from(path.as_str()) {
let core = path.join("core");
if fs::metadata(&core).is_ok() {
tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display());
return Some(path);
}
tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core);
} else {
tracing::debug!("RUST_SRC_PATH is set, but is invalid, ignoring");
}
tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core);
}
get_rust_src(sysroot_path)
}
fn discover_sysroot_src_dir_or_add_component(
sysroot_path: &AbsPathBuf,
current_dir: &AbsPath,
extra_env: &FxHashMap<String, String>,
) -> Result<AbsPathBuf> {
discover_sysroot_src_dir(sysroot_path)
.or_else(|| {
let mut rustup = Command::new(toolchain::rustup());
rustup.envs(&config.extra_env);
rustup.envs(extra_env);
rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]);
utf8_stdout(rustup).ok()?;
get_rust_src(sysroot_path)

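Sysroot-source discovery above is split into an infallible discover_sysroot_src_dir (RUST_SRC_PATH override, returning Option) and a ..._or_add_component wrapper that falls back to `rustup component add rust-src`. A reduced sketch of that two-step lookup; the helper names, the rustup fallback, and the library path are illustrative, not the real rust-analyzer helpers:

use std::env;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::Command;

// Step 1: honour RUST_SRC_PATH if it points at a directory containing `core`.
fn sysroot_src_from_env() -> Option<PathBuf> {
    let path = PathBuf::from(env::var("RUST_SRC_PATH").ok()?);
    if fs::metadata(path.join("core")).is_ok() {
        return Some(path);
    }
    eprintln!("RUST_SRC_PATH is set, but is invalid, ignoring");
    None
}

// Step 2: otherwise try to install the `rust-src` component and look again.
fn sysroot_src_or_add_component(sysroot: &Path) -> Option<PathBuf> {
    sysroot_src_from_env().or_else(|| {
        Command::new("rustup").args(["component", "add", "rust-src"]).status().ok()?;
        let candidate = sysroot.join("lib/rustlib/src/rust/library");
        fs::metadata(&candidate).is_ok().then(|| candidate)
    })
}

fn main() {
    match sysroot_src_or_add_component(Path::new("/usr")) {
        Some(src) => println!("sysroot src: {}", src.display()),
        None => println!("no sysroot src found"),
    }
}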
View file

@ -10,8 +10,8 @@ use paths::{AbsPath, AbsPathBuf};
use serde::de::DeserializeOwned;
use crate::{
CargoConfig, CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace,
Sysroot, WorkspaceBuildScripts,
CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot,
WorkspaceBuildScripts,
};
fn load_cargo(file: &str) -> CrateGraph {
@ -101,7 +101,7 @@ fn to_crate_graph(project_workspace: ProjectWorkspace) -> CrateGraph {
Some(FileId(counter))
}
},
&CargoConfig::default(),
&Default::default(),
)
}
@ -185,6 +185,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -260,6 +263,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -335,6 +341,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -410,6 +419,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -477,6 +489,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
repo: Some(
"https://github.com/rust-lang/libc",
),
name: Some(
"libc",
),
},
is_proc_macro: false,
},
@ -567,6 +582,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -644,6 +662,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -721,6 +742,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -798,6 +822,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -865,6 +892,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
repo: Some(
"https://github.com/rust-lang/libc",
),
name: Some(
"libc",
),
},
is_proc_macro: false,
},
@ -946,6 +976,9 @@ fn cargo_hello_world_project_model() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -1023,6 +1056,9 @@ fn cargo_hello_world_project_model() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -1100,6 +1136,9 @@ fn cargo_hello_world_project_model() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -1177,6 +1216,9 @@ fn cargo_hello_world_project_model() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello-world",
),
},
is_proc_macro: false,
},
@ -1244,6 +1286,9 @@ fn cargo_hello_world_project_model() {
repo: Some(
"https://github.com/rust-lang/libc",
),
name: Some(
"libc",
),
},
is_proc_macro: false,
},
@ -1804,6 +1849,9 @@ fn rust_project_hello_world_project_model() {
),
origin: CratesIo {
repo: None,
name: Some(
"hello_world",
),
},
is_proc_macro: false,
},

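All of the snapshot updates above simply reflect the new name field on CrateOrigin::CratesIo. A minimal local re-declaration showing the shape those snapshots print; the real construction happens in project-model, this is only illustrative:

// Local re-declaration of the variant shape used in the snapshots above.
#[derive(Debug)]
enum CrateOrigin {
    CratesIo { repo: Option<String>, name: Option<String> },
}

fn main() {
    let origin = CrateOrigin::CratesIo {
        repo: Some("https://github.com/rust-lang/libc".to_owned()),
        name: Some("libc".to_owned()),
    };
    println!("{origin:#?}");
}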
View file

@ -21,8 +21,8 @@ use crate::{
cfg_flag::CfgFlag,
rustc_cfg,
sysroot::SysrootCrate,
utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath, ProjectJson, ProjectManifest, Sysroot,
TargetKind, WorkspaceBuildScripts,
utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath, Package, ProjectJson, ProjectManifest,
Sysroot, TargetKind, WorkspaceBuildScripts,
};
/// A set of cfg-overrides per crate.
@ -156,7 +156,11 @@ impl ProjectWorkspace {
})?;
let project_location = project_json.parent().to_path_buf();
let project_json = ProjectJson::new(&project_location, data);
ProjectWorkspace::load_inline(project_json, config.target.as_deref(), config)?
ProjectWorkspace::load_inline(
project_json,
config.target.as_deref(),
&config.extra_env,
)?
}
ProjectManifest::CargoToml(cargo_toml) => {
let cargo_version = utf8_stdout({
@ -184,20 +188,33 @@ impl ProjectWorkspace {
})?;
let cargo = CargoWorkspace::new(meta);
let sysroot = if config.no_sysroot {
None
} else {
Some(Sysroot::discover(cargo_toml.parent(), config).with_context(|| {
format!(
let sysroot = match &config.sysroot {
Some(RustcSource::Path(path)) => {
Some(Sysroot::with_sysroot_dir(path.clone()).with_context(|| {
format!(
"Failed to find sysroot for Cargo.toml file {}.",
cargo_toml.display()
)
})?)
}
Some(RustcSource::Discover) => Some(
Sysroot::discover(cargo_toml.parent(), &config.extra_env).with_context(
|| {
format!(
"Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
cargo_toml.display()
)
})?)
},
)?,
),
None => None,
};
let rustc_dir = match &config.rustc_source {
Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(),
Some(RustcSource::Discover) => Sysroot::discover_rustc(&cargo_toml, config),
Some(RustcSource::Discover) => {
Sysroot::discover_rustc(&cargo_toml, &config.extra_env)
}
None => None,
};
@ -217,7 +234,8 @@ impl ProjectWorkspace {
None => None,
};
let rustc_cfg = rustc_cfg::get(Some(&cargo_toml), config.target.as_deref(), config);
let rustc_cfg =
rustc_cfg::get(Some(&cargo_toml), config.target.as_deref(), &config.extra_env);
let cfg_overrides = config.cfg_overrides();
ProjectWorkspace::Cargo {
@ -238,7 +256,7 @@ impl ProjectWorkspace {
pub fn load_inline(
project_json: ProjectJson,
target: Option<&str>,
config: &CargoConfig,
extra_env: &FxHashMap<String, String>,
) -> Result<ProjectWorkspace> {
let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
(Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)?),
@ -260,7 +278,7 @@ impl ProjectWorkspace {
(None, None) => None,
};
let rustc_cfg = rustc_cfg::get(None, target, config);
let rustc_cfg = rustc_cfg::get(None, target, extra_env);
Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg })
}
@ -270,9 +288,9 @@ impl ProjectWorkspace {
.first()
.and_then(|it| it.parent())
.ok_or_else(|| format_err!("No detached files to load"))?,
&CargoConfig::default(),
&Default::default(),
)?;
let rustc_cfg = rustc_cfg::get(None, None, &CargoConfig::default());
let rustc_cfg = rustc_cfg::get(None, None, &Default::default());
Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
}
@ -306,6 +324,13 @@ impl ProjectWorkspace {
/// The return type contains the path and whether or not
/// the root is a member of the current workspace
pub fn to_roots(&self) -> Vec<PackageRoot> {
let mk_sysroot = |sysroot: Option<&Sysroot>| {
sysroot.map(|sysroot| PackageRoot {
is_local: false,
include: vec![sysroot.src_root().to_path_buf()],
exclude: Vec::new(),
})
};
match self {
ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project
.crates()
@ -316,13 +341,7 @@ impl ProjectWorkspace {
})
.collect::<FxHashSet<_>>()
.into_iter()
.chain(sysroot.as_ref().into_iter().flat_map(|sysroot| {
sysroot.crates().map(move |krate| PackageRoot {
is_local: false,
include: vec![sysroot[krate].root.parent().to_path_buf()],
exclude: Vec::new(),
})
}))
.chain(mk_sysroot(sysroot.as_ref()))
.collect::<Vec<_>>(),
ProjectWorkspace::Cargo {
cargo,
@ -371,11 +390,7 @@ impl ProjectWorkspace {
}
PackageRoot { is_local, include, exclude }
})
.chain(sysroot.iter().map(|sysroot| PackageRoot {
is_local: false,
include: vec![sysroot.src_root().to_path_buf()],
exclude: Vec::new(),
}))
.chain(mk_sysroot(sysroot.as_ref()))
.chain(rustc.iter().flat_map(|rustc| {
rustc.packages().map(move |krate| PackageRoot {
is_local: false,
@ -392,11 +407,7 @@ impl ProjectWorkspace {
include: vec![detached_file.clone()],
exclude: Vec::new(),
})
.chain(sysroot.crates().map(|krate| PackageRoot {
is_local: false,
include: vec![sysroot[krate].root.parent().to_path_buf()],
exclude: Vec::new(),
}))
.chain(mk_sysroot(Some(sysroot)))
.collect(),
}
}
@ -419,7 +430,7 @@ impl ProjectWorkspace {
&self,
load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
config: &CargoConfig,
extra_env: &FxHashMap<String, String>,
) -> CrateGraph {
let _p = profile::span("ProjectWorkspace::to_crate_graph");
@ -430,7 +441,7 @@ impl ProjectWorkspace {
load,
project,
sysroot,
config,
extra_env,
),
ProjectWorkspace::Cargo {
cargo,
@ -469,7 +480,7 @@ fn project_json_to_crate_graph(
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
project: &ProjectJson,
sysroot: &Option<Sysroot>,
config: &CargoConfig,
extra_env: &FxHashMap<String, String>,
) -> CrateGraph {
let mut crate_graph = CrateGraph::default();
let sysroot_deps = sysroot
@ -497,7 +508,7 @@ fn project_json_to_crate_graph(
let target_cfgs = match krate.target.as_deref() {
Some(target) => cfg_cache
.entry(target)
.or_insert_with(|| rustc_cfg::get(None, Some(target), config)),
.or_insert_with(|| rustc_cfg::get(None, Some(target), extra_env)),
None => &rustc_cfg,
};
@ -516,9 +527,15 @@ fn project_json_to_crate_graph(
proc_macro,
krate.is_proc_macro,
if krate.display_name.is_some() {
CrateOrigin::CratesIo { repo: krate.repository.clone() }
CrateOrigin::CratesIo {
repo: krate.repository.clone(),
name: krate
.display_name
.clone()
.map(|n| n.canonical_name().to_string()),
}
} else {
CrateOrigin::CratesIo { repo: None }
CrateOrigin::CratesIo { repo: None, name: None }
},
),
)
@ -630,6 +647,8 @@ fn cargo_to_crate_graph(
lib_tgt = Some((crate_id, cargo[tgt].name.clone()));
pkg_to_lib_crate.insert(pkg, crate_id);
}
// Even crates that don't set proc-macro = true are allowed to depend on proc_macro
// (just none of the APIs work when called outside of a proc macro).
if let Some(proc_macro) = libproc_macro {
add_dep_with_prelude(
&mut crate_graph,
@ -645,19 +664,19 @@ fn cargo_to_crate_graph(
}
// Set deps to the core, std and to the lib target of the current package
for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
for &(from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
// Add sysroot deps first so that a lib target named `core` etc. can overwrite them.
public_deps.add(*from, &mut crate_graph);
public_deps.add(from, &mut crate_graph);
if let Some((to, name)) = lib_tgt.clone() {
if to != *from && *kind != TargetKind::BuildScript {
if to != from && kind != TargetKind::BuildScript {
// (build script can not depend on its library target)
// For root projects with dashes in their name,
// cargo metadata does not do any normalization,
// so we do it ourselves currently
let name = CrateName::normalize_dashes(&name);
add_dep(&mut crate_graph, *from, name, to);
add_dep(&mut crate_graph, from, name, to);
}
}
}
@ -669,17 +688,17 @@ fn cargo_to_crate_graph(
for dep in cargo[pkg].dependencies.iter() {
let name = CrateName::new(&dep.name).unwrap();
if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
if dep.kind == DepKind::Build && *kind != TargetKind::BuildScript {
for &(from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
if dep.kind == DepKind::Build && kind != TargetKind::BuildScript {
// Only build scripts may depend on build dependencies.
continue;
}
if dep.kind != DepKind::Build && *kind == TargetKind::BuildScript {
if dep.kind != DepKind::Build && kind == TargetKind::BuildScript {
// Build scripts may only depend on build dependencies.
continue;
}
add_dep(&mut crate_graph, *from, name.clone(), to)
add_dep(&mut crate_graph, from, name.clone(), to)
}
}
}
@ -690,9 +709,9 @@ fn cargo_to_crate_graph(
// and create dependencies on them for the crates which opt-in to that
if let Some(rustc_workspace) = rustc {
handle_rustc_crates(
&mut crate_graph,
rustc_workspace,
load,
&mut crate_graph,
&cfg_options,
override_cfg,
load_proc_macro,
@ -736,14 +755,17 @@ fn detached_files_to_crate_graph(
let detached_file_crate = crate_graph.add_crate_root(
file_id,
Edition::CURRENT,
display_name,
display_name.clone(),
None,
cfg_options.clone(),
cfg_options.clone(),
Env::default(),
Ok(Vec::new()),
false,
CrateOrigin::CratesIo { repo: None },
CrateOrigin::CratesIo {
repo: None,
name: display_name.map(|n| n.canonical_name().to_string()),
},
);
public_deps.add(detached_file_crate, &mut crate_graph);
@ -752,16 +774,16 @@ fn detached_files_to_crate_graph(
}
fn handle_rustc_crates(
crate_graph: &mut CrateGraph,
rustc_workspace: &CargoWorkspace,
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
crate_graph: &mut CrateGraph,
cfg_options: &CfgOptions,
override_cfg: &CfgOverrides,
load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
pkg_to_lib_crate: &mut FxHashMap<la_arena::Idx<crate::PackageData>, CrateId>,
pkg_to_lib_crate: &mut FxHashMap<Package, CrateId>,
public_deps: &SysrootPublicDeps,
cargo: &CargoWorkspace,
pkg_crates: &FxHashMap<la_arena::Idx<crate::PackageData>, Vec<(CrateId, TargetKind)>>,
pkg_crates: &FxHashMap<Package, Vec<(CrateId, TargetKind)>>,
build_scripts: &WorkspaceBuildScripts,
) {
let mut rustc_pkg_crates = FxHashMap::default();
@ -775,8 +797,8 @@ fn handle_rustc_crates(
let mut queue = VecDeque::new();
queue.push_back(root_pkg);
while let Some(pkg) = queue.pop_front() {
// Don't duplicate packages if they are dependended on a diamond pattern
// N.B. if this line is omitted, we try to analyse over 4_800_000 crates
// Don't duplicate packages if they are dependent on a diamond pattern
// N.B. if this line is omitted, we try to analyze over 4_800_000 crates
// which is not ideal
if rustc_pkg_crates.contains_key(&pkg) {
continue;
@ -919,7 +941,7 @@ fn add_target_crate_root(
env,
proc_macro,
is_proc_macro,
CrateOrigin::CratesIo { repo: pkg.repository.clone() },
CrateOrigin::CratesIo { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) },
)
}

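One recurring cleanup in workspace.rs above is binding Copy tuple fields directly in the loop pattern (`for &(from, kind) in ...`) instead of dereferencing *from/*kind inside the body. A tiny self-contained illustration; the types are stand-ins, not the real CrateId/TargetKind:

// Stand-ins for the real CrateId / TargetKind types.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum TargetKind {
    Lib,
    BuildScript,
}

fn main() {
    let pkg_crates: Vec<(u32, TargetKind)> =
        vec![(0, TargetKind::Lib), (1, TargetKind::BuildScript)];

    // Before: `for (from, kind) in &pkg_crates { ... *from ... *kind ... }`.
    // Binding by value in the pattern copies the fields out, so the body
    // uses plain `from` and `kind`.
    for &(from, kind) in &pkg_crates {
        if kind != TargetKind::BuildScript {
            println!("crate {from} gets sysroot deps");
        }
    }
}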
View file

@ -25,7 +25,7 @@ itertools = "0.10.3"
scip = "0.1.1"
lsp-types = { version = "0.93.1", features = ["proposed"] }
parking_lot = "0.12.1"
xflags = "0.2.4"
xflags = "0.3.0"
oorandom = "11.1.3"
rustc-hash = "1.1.0"
serde = { version = "1.0.137", features = ["derive"] }
@ -34,7 +34,7 @@ threadpool = "1.8.1"
rayon = "1.5.3"
num_cpus = "1.13.1"
mimalloc = { version = "0.1.29", default-features = false, optional = true }
lsp-server = { version = "0.6.0", path = "../../lib/lsp-server" }
lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" }
tracing = "0.1.35"
tracing-subscriber = { version = "0.3.14", default-features = false, features = [
"env-filter",
@ -87,7 +87,6 @@ jemalloc = ["jemallocator", "profile/jemalloc"]
force-always-assert = ["always-assert/force"]
in-rust-tree = [
"proc-macro-srv/sysroot-abi",
"sourcegen/in-rust-tree",
"ide/in-rust-tree",
"syntax/in-rust-tree",
]

View file

@ -132,7 +132,7 @@ where
let ext = span.extensions();
// `FormattedFields` is a a formatted representation of the span's
// `FormattedFields` is a formatted representation of the span's
// fields, which is stored in its extensions by the `fmt` layer's
// `new_span` method. The fields will have been formatted
// by the same field formatter that's provided to the event

View file

@ -37,16 +37,15 @@ fn main() {
process::exit(code);
}
if let Err(err) = try_main() {
let flags = flags::RustAnalyzer::from_env_or_exit();
if let Err(err) = try_main(flags) {
tracing::error!("Unexpected error: {}", err);
eprintln!("{}", err);
process::exit(101);
}
}
fn try_main() -> Result<()> {
let flags = flags::RustAnalyzer::from_env()?;
fn try_main(flags: flags::RustAnalyzer) -> Result<()> {
#[cfg(debug_assertions)]
if flags.wait_dbg || env::var("RA_WAIT_DBG").is_ok() {
#[allow(unused_mut)]
@ -76,10 +75,6 @@ fn try_main() -> Result<()> {
println!("rust-analyzer {}", rust_analyzer::version());
return Ok(());
}
if cmd.help {
println!("{}", flags::RustAnalyzer::HELP);
return Ok(());
}
with_extra_thread("LspServer", run_server)?;
}
flags::RustAnalyzerCmd::ProcMacro(flags::ProcMacro) => {

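main.rs above now parses the CLI flags up front with the xflags-generated from_env_or_exit() (which prints usage and exits on its own) and hands them to try_main. A stripped-down sketch of that structure with a hand-rolled stand-in for the generated flags type; the flag and messages are hypothetical:

use std::process;

// Hypothetical stand-in for the xflags-generated `flags::RustAnalyzer`.
struct Flags {
    version: bool,
}

// Stand-in for `from_env_or_exit`: the real function prints usage and exits
// on bad input, so errors from argument parsing never reach `try_main`.
fn from_env_or_exit() -> Flags {
    Flags { version: std::env::args().any(|a| a == "--version") }
}

fn try_main(flags: Flags) -> Result<(), String> {
    if flags.version {
        println!("my-tool 0.0.0");
        return Ok(());
    }
    // ... run the server ...
    Ok(())
}

fn main() {
    let flags = from_env_or_exit();
    if let Err(err) = try_main(flags) {
        eprintln!("{err}");
        process::exit(101);
    }
}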
View file

@ -4,7 +4,7 @@ use std::mem;
use cfg::{CfgAtom, CfgExpr};
use ide::{FileId, RunnableKind, TestId};
use project_model::{self, ManifestPath, TargetKind};
use project_model::{self, CargoFeatures, ManifestPath, TargetKind};
use vfs::AbsPathBuf;
use crate::{global_state::GlobalStateSnapshot, Result};
@ -35,41 +35,41 @@ impl CargoTargetSpec {
match kind {
RunnableKind::Test { test_id, attr } => {
args.push("test".to_string());
args.push("test".to_owned());
extra_args.push(test_id.to_string());
if let TestId::Path(_) = test_id {
extra_args.push("--exact".to_string());
extra_args.push("--exact".to_owned());
}
extra_args.push("--nocapture".to_string());
extra_args.push("--nocapture".to_owned());
if attr.ignore {
extra_args.push("--ignored".to_string());
extra_args.push("--ignored".to_owned());
}
}
RunnableKind::TestMod { path } => {
args.push("test".to_string());
extra_args.push(path.to_string());
extra_args.push("--nocapture".to_string());
args.push("test".to_owned());
extra_args.push(path.clone());
extra_args.push("--nocapture".to_owned());
}
RunnableKind::Bench { test_id } => {
args.push("bench".to_string());
args.push("bench".to_owned());
extra_args.push(test_id.to_string());
if let TestId::Path(_) = test_id {
extra_args.push("--exact".to_string());
extra_args.push("--exact".to_owned());
}
extra_args.push("--nocapture".to_string());
extra_args.push("--nocapture".to_owned());
}
RunnableKind::DocTest { test_id } => {
args.push("test".to_string());
args.push("--doc".to_string());
args.push("test".to_owned());
args.push("--doc".to_owned());
extra_args.push(test_id.to_string());
extra_args.push("--nocapture".to_string());
extra_args.push("--nocapture".to_owned());
}
RunnableKind::Bin => {
let subcommand = match spec {
Some(CargoTargetSpec { target_kind: TargetKind::Test, .. }) => "test",
_ => "run",
};
args.push(subcommand.to_string());
args.push(subcommand.to_owned());
}
}
@ -82,29 +82,35 @@ impl CargoTargetSpec {
};
let cargo_config = snap.config.cargo();
if cargo_config.all_features {
args.push("--all-features".to_string());
for feature in target_required_features {
args.push("--features".to_string());
args.push(feature);
}
} else {
let mut features = Vec::new();
if let Some(cfg) = cfg.as_ref() {
required_features(cfg, &mut features);
match &cargo_config.features {
CargoFeatures::All => {
args.push("--all-features".to_owned());
for feature in target_required_features {
args.push("--features".to_owned());
args.push(feature);
}
}
CargoFeatures::Selected { features, no_default_features } => {
let mut feats = Vec::new();
if let Some(cfg) = cfg.as_ref() {
required_features(cfg, &mut feats);
}
features.extend(cargo_config.features);
features.extend(target_required_features);
feats.extend(features.iter().cloned());
feats.extend(target_required_features);
features.dedup();
for feature in features {
args.push("--features".to_string());
args.push(feature);
feats.dedup();
for feature in feats {
args.push("--features".to_owned());
args.push(feature);
}
if *no_default_features {
args.push("--no-default-features".to_owned());
}
}
}
Ok((args, extra_args))
}
@ -136,7 +142,7 @@ impl CargoTargetSpec {
}
pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) {
buf.push("--package".to_string());
buf.push("--package".to_owned());
buf.push(self.package);
// Can't mix --doc with other target flags
@ -145,23 +151,23 @@ impl CargoTargetSpec {
}
match self.target_kind {
TargetKind::Bin => {
buf.push("--bin".to_string());
buf.push("--bin".to_owned());
buf.push(self.target);
}
TargetKind::Test => {
buf.push("--test".to_string());
buf.push("--test".to_owned());
buf.push(self.target);
}
TargetKind::Bench => {
buf.push("--bench".to_string());
buf.push("--bench".to_owned());
buf.push(self.target);
}
TargetKind::Example => {
buf.push("--example".to_string());
buf.push("--example".to_owned());
buf.push(self.target);
}
TargetKind::Lib => {
buf.push("--lib".to_string());
buf.push("--lib".to_owned());
}
TargetKind::Other | TargetKind::BuildScript => (),
}

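Note that the feats.dedup() call above, like Vec::dedup in general, only drops consecutive duplicates; a quick illustration:

fn main() {
    let mut feats = vec!["a".to_owned(), "b".to_owned(), "a".to_owned()];
    feats.dedup();
    assert_eq!(feats, ["a", "b", "a"]); // the non-adjacent duplicate survives

    feats.sort();
    feats.dedup();
    assert_eq!(feats, ["a", "b"]);
}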
View file

@ -24,7 +24,7 @@ use ide_db::base_db::{
use itertools::Itertools;
use oorandom::Rand32;
use profile::{Bytes, StopWatch};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use stdx::format_to;
@ -55,7 +55,10 @@ impl flags::AnalysisStats {
};
let mut cargo_config = CargoConfig::default();
cargo_config.no_sysroot = self.no_sysroot;
cargo_config.sysroot = match self.no_sysroot {
true => None,
false => Some(RustcSource::Discover),
};
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: !self.disable_build_scripts,
with_proc_macro: !self.disable_proc_macros,
@ -81,7 +84,7 @@ impl flags::AnalysisStats {
};
let (host, vfs, _proc_macro) =
load_workspace(workspace, &cargo_config, &load_cargo_config)?;
load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
let db = host.raw_database();
eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
eprint!(" (metadata {}", metadata_time);

Some files were not shown because too many files have changed in this diff.